Test Report: Docker_Linux_containerd_arm64 20535

f30cb3cfe346a634e035681bc4eff951ae572c17:2025-03-17:38751

Test failures (3/226)

|-------|---------------------------------------|--------------|
| Order | Failed test                           | Duration (s) |
|-------|---------------------------------------|--------------|
| 230   | TestMultiNode/serial/DeployApp2Nodes  | 698.78       |
| 278   | TestPause/serial/Start                | 632.82       |
| 341   | TestNetworkPlugins/group/calico/Start | 7200.061     |
|-------|---------------------------------------|--------------|
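
Any entry above can normally be rerun on its own with Go's standard subtest filtering. A minimal sketch, assuming minikube's usual test/integration package layout; the timeout and any suite-specific flags are illustrative, not taken from this run:

	# hypothetical local repro of the first failure
	go test ./test/integration -run 'TestMultiNode/serial/DeployApp2Nodes' -timeout 30m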
TestMultiNode/serial/DeployApp2Nodes (698.78s)

=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:493: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:498: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- rollout status deployment/busybox
E0317 10:54:17.247593    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:55:19.262218    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:56:42.328861    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:59:17.247073    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 11:00:19.262726    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:498: (dbg) Non-zero exit: out/minikube-linux-arm64 kubectl -p multinode-286863 -- rollout status deployment/busybox: exit status 1 (10m4.520384448s)

-- stdout --
	Waiting for deployment "busybox" rollout to finish: 0 of 2 updated replicas are available...
	Waiting for deployment "busybox" rollout to finish: 1 of 2 updated replicas are available...

-- /stdout --
** stderr ** 
	error: deployment "busybox" exceeded its progress deadline

** /stderr **
multinode_test.go:500: failed to deploy busybox to multinode cluster
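The progress-deadline error above only says the rollout stalled, not why the second replica never became available. A hypothetical follow-up against the same profile (standard kubectl subcommands, not commands from the recorded run) would be:

	# rollout conditions and recent deployment events
	out/minikube-linux-arm64 kubectl -p multinode-286863 -- describe deployment busybox
	# -o wide shows which node each replica was scheduled to,
	# i.e. whether the second pod ever landed on the worker node
	out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o wide
	# cluster events in timestamp order, for image-pull or CNI failures
	out/minikube-linux-arm64 kubectl -p multinode-286863 -- get events --sort-by=.lastTimestamp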
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:12.179683    7572 retry.go:31] will retry after 982.952762ms: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:13.316404    7572 retry.go:31] will retry after 1.976808656s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:15.445988    7572 retry.go:31] will retry after 3.137315067s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:18.740717    7572 retry.go:31] will retry after 3.730374709s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:22.620973    7572 retry.go:31] will retry after 3.214420921s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:25.980539    7572 retry.go:31] will retry after 7.394537253s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:33.526784    7572 retry.go:31] will retry after 16.496411752s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:02:50.170010    7572 retry.go:31] will retry after 18.275795855s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0317 11:03:08.592465    7572 retry.go:31] will retry after 32.684859277s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
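Each retry above issues the same JSONPath query; on success the template prints one space-separated podIP per replica, so a two-node deployment should yield two addresses. A sketch of the expected shape (the first address is from this log, the second is hypothetical):

	out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].status.podIP}'
	# expected on success, one pod IP per node:
	# 10.244.0.3 10.244.1.2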
multinode_test.go:528: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-6q5tk -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-kn9lf -- nslookup kubernetes.io
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-6q5tk -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-kn9lf -- nslookup kubernetes.default
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-6q5tk -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-kn9lf -- nslookup kubernetes.default.svc.cluster.local
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestMultiNode/serial/DeployApp2Nodes]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect multinode-286863
helpers_test.go:235: (dbg) docker inspect multinode-286863:

-- stdout --
	[
	    {
	        "Id": "012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f",
	        "Created": "2025-03-17T10:51:08.722802417Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 122057,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2025-03-17T10:51:08.78443802Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:df0c2544fb3106b890f0a9ab81fcf49f97edb092b83e47f42288ad5dfe1f4b40",
	        "ResolvConfPath": "/var/lib/docker/containers/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/hostname",
	        "HostsPath": "/var/lib/docker/containers/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/hosts",
	        "LogPath": "/var/lib/docker/containers/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f-json.log",
	        "Name": "/multinode-286863",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "multinode-286863:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "multinode-286863",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2306867200,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4613734400,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "ID": "012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f",
	                "LowerDir": "/var/lib/docker/overlay2/e3e69e3ec0dda01b21dcab149f3919705100266f7e0f448a469ae7840502a376-init/diff:/var/lib/docker/overlay2/c96583d021b8dd172f2992413e93a0a3e28934ab88e8d005e42772deac52d50d/diff",
	                "MergedDir": "/var/lib/docker/overlay2/e3e69e3ec0dda01b21dcab149f3919705100266f7e0f448a469ae7840502a376/merged",
	                "UpperDir": "/var/lib/docker/overlay2/e3e69e3ec0dda01b21dcab149f3919705100266f7e0f448a469ae7840502a376/diff",
	                "WorkDir": "/var/lib/docker/overlay2/e3e69e3ec0dda01b21dcab149f3919705100266f7e0f448a469ae7840502a376/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "multinode-286863",
	                "Source": "/var/lib/docker/volumes/multinode-286863/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "multinode-286863",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "multinode-286863",
	                "name.minikube.sigs.k8s.io": "multinode-286863",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "5a38ea047a2cc5b0cd103129cc500fb2fd66c5444c5dc0708481d255ef2f2700",
	            "SandboxKey": "/var/run/docker/netns/5a38ea047a2c",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "32908"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "32909"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "32912"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "32910"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "32911"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "multinode-286863": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.67.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "f6:6b:31:36:ca:6a",
	                    "DriverOpts": null,
	                    "GwPriority": 0,
	                    "NetworkID": "afee3b0bb0e9b57acdf5075db6140c2b231b702b9df69ac64220deeb0b53108a",
	                    "EndpointID": "0bbe8307abdedb606c352c5bfc6b1278b7068546a2e353a15816621efce9ee80",
	                    "Gateway": "192.168.67.1",
	                    "IPAddress": "192.168.67.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "multinode-286863",
	                        "012f3f8578a8"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
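When only a few fields from a dump like the one above are needed, docker inspect accepts a Go template via -f/--format; these are standard invocations, not commands from the recorded run:

	# container state and init PID only
	docker inspect -f '{{.State.Status}} pid={{.State.Pid}}' multinode-286863
	# network attachments as JSON
	docker inspect -f '{{json .NetworkSettings.Networks}}' multinode-286863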
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p multinode-286863 -n multinode-286863
helpers_test.go:244: <<< TestMultiNode/serial/DeployApp2Nodes FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/DeployApp2Nodes]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p multinode-286863 logs -n 25: (1.4906576s)
helpers_test.go:252: TestMultiNode/serial/DeployApp2Nodes logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |                       Args                        |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| stop    | -p mount-start-2-409903                           | mount-start-2-409903 | jenkins | v1.35.0 | 17 Mar 25 10:50 UTC | 17 Mar 25 10:50 UTC |
	| start   | -p mount-start-2-409903                           | mount-start-2-409903 | jenkins | v1.35.0 | 17 Mar 25 10:50 UTC | 17 Mar 25 10:51 UTC |
	| ssh     | mount-start-2-409903 ssh -- ls                    | mount-start-2-409903 | jenkins | v1.35.0 | 17 Mar 25 10:51 UTC | 17 Mar 25 10:51 UTC |
	|         | /minikube-host                                    |                      |         |         |                     |                     |
	| delete  | -p mount-start-2-409903                           | mount-start-2-409903 | jenkins | v1.35.0 | 17 Mar 25 10:51 UTC | 17 Mar 25 10:51 UTC |
	| delete  | -p mount-start-1-407958                           | mount-start-1-407958 | jenkins | v1.35.0 | 17 Mar 25 10:51 UTC | 17 Mar 25 10:51 UTC |
	| start   | -p multinode-286863                               | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 10:51 UTC | 17 Mar 25 10:52 UTC |
	|         | --wait=true --memory=2200                         |                      |         |         |                     |                     |
	|         | --nodes=2 -v=8                                    |                      |         |         |                     |                     |
	|         | --alsologtostderr                                 |                      |         |         |                     |                     |
	|         | --driver=docker                                   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd                    |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- apply -f                   | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 10:52 UTC | 17 Mar 25 10:52 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- rollout                    | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 10:52 UTC |                     |
	|         | status deployment/busybox                         |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:02 UTC | 17 Mar 25 11:02 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- get pods -o                | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-6q5tk --                       |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-kn9lf --                       |                      |         |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-6q5tk --                       |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-kn9lf --                       |                      |         |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-6q5tk -- nslookup              |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	| kubectl | -p multinode-286863 -- exec                       | multinode-286863     | jenkins | v1.35.0 | 17 Mar 25 11:03 UTC | 17 Mar 25 11:03 UTC |
	|         | busybox-58667487b6-kn9lf -- nslookup              |                      |         |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |         |         |                     |                     |
	|---------|---------------------------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/03/17 10:51:03
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.24.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0317 10:51:03.370158  121672 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:51:03.370354  121672 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:51:03.370388  121672 out.go:358] Setting ErrFile to fd 2...
	I0317 10:51:03.370409  121672 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:51:03.370804  121672 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:51:03.371320  121672 out.go:352] Setting JSON to false
	I0317 10:51:03.372222  121672 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":2009,"bootTime":1742206655,"procs":165,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 10:51:03.372328  121672 start.go:139] virtualization:  
	I0317 10:51:03.375783  121672 out.go:177] * [multinode-286863] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	I0317 10:51:03.379562  121672 out.go:177]   - MINIKUBE_LOCATION=20535
	I0317 10:51:03.379645  121672 notify.go:220] Checking for updates...
	I0317 10:51:03.385437  121672 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 10:51:03.388342  121672 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:51:03.391139  121672 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 10:51:03.394031  121672 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0317 10:51:03.396920  121672 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0317 10:51:03.400059  121672 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 10:51:03.431533  121672 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 10:51:03.431663  121672 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:51:03.503803  121672 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:23 OomKillDisable:true NGoroutines:42 SystemTime:2025-03-17 10:51:03.493199487 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:51:03.503931  121672 docker.go:318] overlay module found
	I0317 10:51:03.507173  121672 out.go:177] * Using the docker driver based on user configuration
	I0317 10:51:03.510152  121672 start.go:297] selected driver: docker
	I0317 10:51:03.510178  121672 start.go:901] validating driver "docker" against <nil>
	I0317 10:51:03.510193  121672 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0317 10:51:03.510954  121672 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:51:03.565534  121672 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:23 OomKillDisable:true NGoroutines:42 SystemTime:2025-03-17 10:51:03.556189543 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:51:03.565704  121672 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0317 10:51:03.565940  121672 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0317 10:51:03.568803  121672 out.go:177] * Using Docker driver with root privileges
	I0317 10:51:03.571698  121672 cni.go:84] Creating CNI manager for ""
	I0317 10:51:03.571796  121672 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0317 10:51:03.571814  121672 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0317 10:51:03.571932  121672 start.go:340] cluster config:
	{Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:51:03.575120  121672 out.go:177] * Starting "multinode-286863" primary control-plane node in "multinode-286863" cluster
	I0317 10:51:03.577914  121672 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0317 10:51:03.580829  121672 out.go:177] * Pulling base image v0.0.46-1741860993-20523 ...
	I0317 10:51:03.583643  121672 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:51:03.583703  121672 preload.go:146] Found local preload: /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4
	I0317 10:51:03.583716  121672 cache.go:56] Caching tarball of preloaded images
	I0317 10:51:03.583738  121672 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon
	I0317 10:51:03.583801  121672 preload.go:172] Found /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0317 10:51:03.583812  121672 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on containerd
	I0317 10:51:03.584179  121672 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json ...
	I0317 10:51:03.584209  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json: {Name:mka039d43cda19513ece6c098e806752b96ea93a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:03.603813  121672 image.go:100] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon, skipping pull
	I0317 10:51:03.603838  121672 cache.go:145] gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 exists in daemon, skipping load
	I0317 10:51:03.603858  121672 cache.go:230] Successfully downloaded all kic artifacts
	I0317 10:51:03.603892  121672 start.go:360] acquireMachinesLock for multinode-286863: {Name:mk54425ef4843b6c8261c29ca5788ceb97884bf5 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0317 10:51:03.604011  121672 start.go:364] duration metric: took 97.453µs to acquireMachinesLock for "multinode-286863"
	I0317 10:51:03.604041  121672 start.go:93] Provisioning new machine with config: &{Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0317 10:51:03.604117  121672 start.go:125] createHost starting for "" (driver="docker")
	I0317 10:51:03.607537  121672 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0317 10:51:03.607760  121672 start.go:159] libmachine.API.Create for "multinode-286863" (driver="docker")
	I0317 10:51:03.607797  121672 client.go:168] LocalClient.Create starting
	I0317 10:51:03.607868  121672 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem
	I0317 10:51:03.607910  121672 main.go:141] libmachine: Decoding PEM data...
	I0317 10:51:03.607926  121672 main.go:141] libmachine: Parsing certificate...
	I0317 10:51:03.607979  121672 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem
	I0317 10:51:03.607995  121672 main.go:141] libmachine: Decoding PEM data...
	I0317 10:51:03.608005  121672 main.go:141] libmachine: Parsing certificate...
	I0317 10:51:03.608375  121672 cli_runner.go:164] Run: docker network inspect multinode-286863 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0317 10:51:03.624591  121672 cli_runner.go:211] docker network inspect multinode-286863 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0317 10:51:03.624676  121672 network_create.go:284] running [docker network inspect multinode-286863] to gather additional debugging logs...
	I0317 10:51:03.624691  121672 cli_runner.go:164] Run: docker network inspect multinode-286863
	W0317 10:51:03.640429  121672 cli_runner.go:211] docker network inspect multinode-286863 returned with exit code 1
	I0317 10:51:03.640461  121672 network_create.go:287] error running [docker network inspect multinode-286863]: docker network inspect multinode-286863: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network multinode-286863 not found
	I0317 10:51:03.640475  121672 network_create.go:289] output of [docker network inspect multinode-286863]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network multinode-286863 not found
	
	** /stderr **
	I0317 10:51:03.640576  121672 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 10:51:03.655837  121672 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-e774881651be IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:12:df:c7:61:5e:f1} reservation:<nil>}
	I0317 10:51:03.656069  121672 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-d6aab97fa8ac IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:d6:0d:b2:7e:8e:d0} reservation:<nil>}
	I0317 10:51:03.656379  121672 network.go:206] using free private subnet 192.168.67.0/24: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400199b350}
	I0317 10:51:03.656405  121672 network_create.go:124] attempt to create docker network multinode-286863 192.168.67.0/24 with gateway 192.168.67.1 and MTU of 1500 ...
	I0317 10:51:03.656470  121672 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.67.0/24 --gateway=192.168.67.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=multinode-286863 multinode-286863
	I0317 10:51:03.716904  121672 network_create.go:108] docker network multinode-286863 192.168.67.0/24 created
	I0317 10:51:03.716931  121672 kic.go:121] calculated static IP "192.168.67.2" for the "multinode-286863" container
	I0317 10:51:03.717002  121672 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0317 10:51:03.732849  121672 cli_runner.go:164] Run: docker volume create multinode-286863 --label name.minikube.sigs.k8s.io=multinode-286863 --label created_by.minikube.sigs.k8s.io=true
	I0317 10:51:03.750811  121672 oci.go:103] Successfully created a docker volume multinode-286863
	I0317 10:51:03.750935  121672 cli_runner.go:164] Run: docker run --rm --name multinode-286863-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-286863 --entrypoint /usr/bin/test -v multinode-286863:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -d /var/lib
	I0317 10:51:04.282334  121672 oci.go:107] Successfully prepared a docker volume multinode-286863
	I0317 10:51:04.282384  121672 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:51:04.282403  121672 kic.go:194] Starting extracting preloaded images to volume ...
	I0317 10:51:04.282483  121672 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-286863:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir
	I0317 10:51:08.654601  121672 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-286863:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir: (4.372078207s)
	I0317 10:51:08.654635  121672 kic.go:203] duration metric: took 4.372227788s to extract preloaded images to volume ...
	W0317 10:51:08.654787  121672 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0317 10:51:08.654942  121672 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0317 10:51:08.708096  121672 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-286863 --name multinode-286863 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-286863 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-286863 --network multinode-286863 --ip 192.168.67.2 --volume multinode-286863:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185
	I0317 10:51:09.023972  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Running}}
	I0317 10:51:09.047058  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:09.074433  121672 cli_runner.go:164] Run: docker exec multinode-286863 stat /var/lib/dpkg/alternatives/iptables
	I0317 10:51:09.127553  121672 oci.go:144] the created container "multinode-286863" has a running status.
	I0317 10:51:09.127593  121672 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa...
	I0317 10:51:09.570164  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0317 10:51:09.570282  121672 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0317 10:51:09.604781  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:09.634740  121672 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0317 10:51:09.634760  121672 kic_runner.go:114] Args: [docker exec --privileged multinode-286863 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0317 10:51:09.687166  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:09.713277  121672 machine.go:93] provisionDockerMachine start ...
	I0317 10:51:09.713362  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:09.736499  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:09.736823  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32908 <nil> <nil>}
	I0317 10:51:09.736833  121672 main.go:141] libmachine: About to run SSH command:
	hostname
	I0317 10:51:09.870404  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-286863
	
	I0317 10:51:09.870431  121672 ubuntu.go:169] provisioning hostname "multinode-286863"
	I0317 10:51:09.870549  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:09.896754  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:09.897071  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32908 <nil> <nil>}
	I0317 10:51:09.897089  121672 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-286863 && echo "multinode-286863" | sudo tee /etc/hostname
	I0317 10:51:10.046816  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-286863
	
	I0317 10:51:10.047058  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.072418  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:10.072714  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32908 <nil> <nil>}
	I0317 10:51:10.072731  121672 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-286863' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-286863/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-286863' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0317 10:51:10.199526  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0317 10:51:10.199558  121672 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/20535-2262/.minikube CaCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/20535-2262/.minikube}
	I0317 10:51:10.199650  121672 ubuntu.go:177] setting up certificates
	I0317 10:51:10.199676  121672 provision.go:84] configureAuth start
	I0317 10:51:10.199748  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863
	I0317 10:51:10.221609  121672 provision.go:143] copyHostCerts
	I0317 10:51:10.221648  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem
	I0317 10:51:10.221677  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem, removing ...
	I0317 10:51:10.221684  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem
	I0317 10:51:10.221744  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem (1078 bytes)
	I0317 10:51:10.221818  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem
	I0317 10:51:10.221837  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem, removing ...
	I0317 10:51:10.221841  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem
	I0317 10:51:10.221867  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem (1123 bytes)
	I0317 10:51:10.221908  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem
	I0317 10:51:10.221927  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem, removing ...
	I0317 10:51:10.221931  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem
	I0317 10:51:10.221955  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem (1679 bytes)
	I0317 10:51:10.222003  121672 provision.go:117] generating server cert: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem org=jenkins.multinode-286863 san=[127.0.0.1 192.168.67.2 localhost minikube multinode-286863]
	I0317 10:51:10.384237  121672 provision.go:177] copyRemoteCerts
	I0317 10:51:10.384305  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0317 10:51:10.384345  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.401444  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:10.492343  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0317 10:51:10.492409  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem --> /etc/docker/server.pem (1216 bytes)
	I0317 10:51:10.517773  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0317 10:51:10.517855  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0317 10:51:10.542035  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0317 10:51:10.542114  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0317 10:51:10.566175  121672 provision.go:87] duration metric: took 366.482701ms to configureAuth
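
configureAuth above generated server.pem with SANs [127.0.0.1 192.168.67.2 localhost minikube multinode-286863] and copied it to /etc/docker. A short, hedged Go sketch for verifying those SANs after the fact; the file path is illustrative:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
)

func main() {
	// e.g. .minikube/machines/server.pem from the copyRemoteCerts step above
	data, err := os.ReadFile("server.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	fmt.Println("DNS SANs:", cert.DNSNames)    // expect localhost, minikube, multinode-286863
	fmt.Println("IP SANs:", cert.IPAddresses)  // expect 127.0.0.1, 192.168.67.2
}
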
	I0317 10:51:10.566201  121672 ubuntu.go:193] setting minikube options for container-runtime
	I0317 10:51:10.566389  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:51:10.566409  121672 machine.go:96] duration metric: took 853.10202ms to provisionDockerMachine
	I0317 10:51:10.566416  121672 client.go:171] duration metric: took 6.95861212s to LocalClient.Create
	I0317 10:51:10.566439  121672 start.go:167] duration metric: took 6.958680553s to libmachine.API.Create "multinode-286863"
	I0317 10:51:10.566448  121672 start.go:293] postStartSetup for "multinode-286863" (driver="docker")
	I0317 10:51:10.566456  121672 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0317 10:51:10.566505  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0317 10:51:10.566547  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.583394  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:10.672475  121672 ssh_runner.go:195] Run: cat /etc/os-release
	I0317 10:51:10.675723  121672 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.5 LTS"
	I0317 10:51:10.675744  121672 command_runner.go:130] > NAME="Ubuntu"
	I0317 10:51:10.675751  121672 command_runner.go:130] > VERSION_ID="22.04"
	I0317 10:51:10.675757  121672 command_runner.go:130] > VERSION="22.04.5 LTS (Jammy Jellyfish)"
	I0317 10:51:10.675763  121672 command_runner.go:130] > VERSION_CODENAME=jammy
	I0317 10:51:10.675767  121672 command_runner.go:130] > ID=ubuntu
	I0317 10:51:10.675772  121672 command_runner.go:130] > ID_LIKE=debian
	I0317 10:51:10.675777  121672 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0317 10:51:10.675782  121672 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0317 10:51:10.675804  121672 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0317 10:51:10.675818  121672 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0317 10:51:10.675823  121672 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0317 10:51:10.675891  121672 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0317 10:51:10.675915  121672 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0317 10:51:10.675926  121672 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0317 10:51:10.675935  121672 info.go:137] Remote host: Ubuntu 22.04.5 LTS
	I0317 10:51:10.675956  121672 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/addons for local assets ...
	I0317 10:51:10.676016  121672 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/files for local assets ...
	I0317 10:51:10.676105  121672 filesync.go:149] local asset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> 75722.pem in /etc/ssl/certs
	I0317 10:51:10.676115  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> /etc/ssl/certs/75722.pem
	I0317 10:51:10.676217  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0317 10:51:10.684964  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /etc/ssl/certs/75722.pem (1708 bytes)
	I0317 10:51:10.709529  121672 start.go:296] duration metric: took 143.065849ms for postStartSetup
	I0317 10:51:10.709915  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863
	I0317 10:51:10.727221  121672 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json ...
	I0317 10:51:10.727523  121672 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 10:51:10.727574  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.746546  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:10.831357  121672 command_runner.go:130] > 14%
	I0317 10:51:10.831880  121672 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0317 10:51:10.835913  121672 command_runner.go:130] > 169G
	I0317 10:51:10.836233  121672 start.go:128] duration metric: took 7.232104678s to createHost
	I0317 10:51:10.836247  121672 start.go:83] releasing machines lock for "multinode-286863", held for 7.23222408s
	I0317 10:51:10.836326  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863
	I0317 10:51:10.853377  121672 ssh_runner.go:195] Run: cat /version.json
	I0317 10:51:10.853428  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.853754  121672 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0317 10:51:10.853819  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:10.878709  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:10.892923  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:10.966174  121672 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.46-1741860993-20523", "minikube_version": "v1.35.0", "commit": "71529bb89e3c12f14ea8f1963dd965c6be2666ce"}
	I0317 10:51:10.966346  121672 ssh_runner.go:195] Run: systemctl --version
	I0317 10:51:11.096728  121672 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0317 10:51:11.100128  121672 command_runner.go:130] > systemd 249 (249.11-0ubuntu3.12)
	I0317 10:51:11.100170  121672 command_runner.go:130] > +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified
	I0317 10:51:11.100268  121672 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0317 10:51:11.104753  121672 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0317 10:51:11.104824  121672 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0317 10:51:11.104839  121672 command_runner.go:130] > Device: 36h/54d	Inode: 1315290     Links: 1
	I0317 10:51:11.104847  121672 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0317 10:51:11.104853  121672 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0317 10:51:11.104865  121672 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0317 10:51:11.104871  121672 command_runner.go:130] > Change: 2025-03-17 10:25:40.255332969 +0000
	I0317 10:51:11.104893  121672 command_runner.go:130] >  Birth: 2025-03-17 10:25:40.255332969 +0000
	I0317 10:51:11.104996  121672 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0317 10:51:11.133192  121672 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0317 10:51:11.133346  121672 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0317 10:51:11.167684  121672 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0317 10:51:11.167749  121672 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
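
The find/sed invocation above patches the loopback CNI config in place: bump cniVersion to 1.0.0 and insert a "name" key if missing. A hedged Go sketch of the same patch done through JSON rather than sed:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// e.g. the contents of /etc/cni/net.d/200-loopback.conf before patching
	raw := []byte(`{"cniVersion": "0.3.1", "type": "loopback"}`)
	var conf map[string]interface{}
	if err := json.Unmarshal(raw, &conf); err != nil {
		panic(err)
	}
	conf["cniVersion"] = "1.0.0" // same rewrite as the sed expression
	if _, ok := conf["name"]; !ok {
		conf["name"] = "loopback" // insert the missing "name" key
	}
	out, _ := json.MarshalIndent(conf, "", "  ")
	fmt.Println(string(out))
}
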
	I0317 10:51:11.167757  121672 start.go:495] detecting cgroup driver to use...
	I0317 10:51:11.167805  121672 detect.go:187] detected "cgroupfs" cgroup driver on host os
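
One way to reproduce the "detected cgroupfs" check outside minikube is to ask the Docker daemon directly; a sketch under the assumption that the docker CLI is on PATH (minikube's detect.go may probe differently):

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func main() {
	// docker info exposes the daemon's cgroup driver via a Go template
	out, err := exec.Command("docker", "info", "--format", "{{.CgroupDriver}}").Output()
	if err != nil {
		panic(err)
	}
	fmt.Println("cgroup driver:", strings.TrimSpace(string(out))) // e.g. "cgroupfs"
}
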
	I0317 10:51:11.167862  121672 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0317 10:51:11.181277  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0317 10:51:11.193542  121672 docker.go:217] disabling cri-docker service (if available) ...
	I0317 10:51:11.193659  121672 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0317 10:51:11.207719  121672 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0317 10:51:11.222749  121672 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0317 10:51:11.313458  121672 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0317 10:51:11.407689  121672 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0317 10:51:11.407790  121672 docker.go:233] disabling docker service ...
	I0317 10:51:11.407868  121672 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0317 10:51:11.432337  121672 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0317 10:51:11.444529  121672 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0317 10:51:11.531668  121672 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0317 10:51:11.531763  121672 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0317 10:51:11.624006  121672 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0317 10:51:11.624134  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0317 10:51:11.635661  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0317 10:51:11.651243  121672 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0317 10:51:11.652728  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0317 10:51:11.662680  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0317 10:51:11.672875  121672 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0317 10:51:11.672981  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0317 10:51:11.682894  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 10:51:11.692931  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0317 10:51:11.703090  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 10:51:11.712770  121672 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0317 10:51:11.722245  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0317 10:51:11.732562  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0317 10:51:11.742239  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0317 10:51:11.752772  121672 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0317 10:51:11.760516  121672 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0317 10:51:11.761593  121672 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0317 10:51:11.770306  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:51:11.866317  121672 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0317 10:51:11.986624  121672 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0317 10:51:11.986691  121672 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0317 10:51:11.990073  121672 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0317 10:51:11.990093  121672 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0317 10:51:11.990099  121672 command_runner.go:130] > Device: 48h/72d	Inode: 175         Links: 1
	I0317 10:51:11.990117  121672 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0317 10:51:11.990123  121672 command_runner.go:130] > Access: 2025-03-17 10:51:11.936844444 +0000
	I0317 10:51:11.990128  121672 command_runner.go:130] > Modify: 2025-03-17 10:51:11.936844444 +0000
	I0317 10:51:11.990133  121672 command_runner.go:130] > Change: 2025-03-17 10:51:11.936844444 +0000
	I0317 10:51:11.990136  121672 command_runner.go:130] >  Birth: -
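
"Will wait 60s for socket path" above amounts to polling stat on the socket until containerd comes back after the restart. A minimal sketch of that wait loop (waitForSocket is an illustrative name; the poll interval is an assumption):

package main

import (
	"fmt"
	"os"
	"time"
)

// waitForSocket polls until path exists or the deadline passes.
func waitForSocket(path string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		if _, err := os.Stat(path); err == nil {
			return nil // socket present; the runtime restarted successfully
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("timed out waiting for %s", path)
}

func main() {
	if err := waitForSocket("/run/containerd/containerd.sock", 60*time.Second); err != nil {
		fmt.Println(err)
	}
}
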
	I0317 10:51:11.990412  121672 start.go:563] Will wait 60s for crictl version
	I0317 10:51:11.990468  121672 ssh_runner.go:195] Run: which crictl
	I0317 10:51:11.993432  121672 command_runner.go:130] > /usr/bin/crictl
	I0317 10:51:11.993863  121672 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0317 10:51:12.028145  121672 command_runner.go:130] > Version:  0.1.0
	I0317 10:51:12.028164  121672 command_runner.go:130] > RuntimeName:  containerd
	I0317 10:51:12.028170  121672 command_runner.go:130] > RuntimeVersion:  1.7.25
	I0317 10:51:12.028175  121672 command_runner.go:130] > RuntimeApiVersion:  v1
	I0317 10:51:12.030616  121672 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.25
	RuntimeApiVersion:  v1
	I0317 10:51:12.030685  121672 ssh_runner.go:195] Run: containerd --version
	I0317 10:51:12.053374  121672 command_runner.go:130] > containerd containerd.io 1.7.25 bcc810d6b9066471b0b6fa75f557a15a1cbf31bb
	I0317 10:51:12.055225  121672 ssh_runner.go:195] Run: containerd --version
	I0317 10:51:12.078425  121672 command_runner.go:130] > containerd containerd.io 1.7.25 bcc810d6b9066471b0b6fa75f557a15a1cbf31bb
	I0317 10:51:12.084344  121672 out.go:177] * Preparing Kubernetes v1.32.2 on containerd 1.7.25 ...
	I0317 10:51:12.087275  121672 cli_runner.go:164] Run: docker network inspect multinode-286863 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 10:51:12.105048  121672 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0317 10:51:12.108995  121672 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0317 10:51:12.120516  121672 kubeadm.go:883] updating cluster {Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0317 10:51:12.120635  121672 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:51:12.120708  121672 ssh_runner.go:195] Run: sudo crictl images --output json
	I0317 10:51:12.160388  121672 command_runner.go:130] > {
	I0317 10:51:12.160413  121672 command_runner.go:130] >   "images": [
	I0317 10:51:12.160419  121672 command_runner.go:130] >     {
	I0317 10:51:12.160429  121672 command_runner.go:130] >       "id": "sha256:e1181ee320546c66f17956a302db1b7899d88a593f116726718851133de588b6",
	I0317 10:51:12.160438  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160444  121672 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20241212-9f82dd49"
	I0317 10:51:12.160448  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160453  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160462  121672 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:56ea59f77258052c4506076525318ffa66817500f68e94a50fdf7d600a280d26"
	I0317 10:51:12.160473  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160480  121672 command_runner.go:130] >       "size": "35679862",
	I0317 10:51:12.160484  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.160489  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.160496  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160500  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160504  121672 command_runner.go:130] >     },
	I0317 10:51:12.160515  121672 command_runner.go:130] >     {
	I0317 10:51:12.160528  121672 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0317 10:51:12.160534  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160540  121672 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0317 10:51:12.160544  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160548  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160556  121672 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0317 10:51:12.160560  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160565  121672 command_runner.go:130] >       "size": "8034419",
	I0317 10:51:12.160572  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.160581  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.160588  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160592  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160595  121672 command_runner.go:130] >     },
	I0317 10:51:12.160598  121672 command_runner.go:130] >     {
	I0317 10:51:12.160605  121672 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0317 10:51:12.160613  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160621  121672 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0317 10:51:12.160625  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160630  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160640  121672 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0317 10:51:12.160644  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160648  121672 command_runner.go:130] >       "size": "16948420",
	I0317 10:51:12.160652  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.160660  121672 command_runner.go:130] >       "username": "nonroot",
	I0317 10:51:12.160664  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160674  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160680  121672 command_runner.go:130] >     },
	I0317 10:51:12.160684  121672 command_runner.go:130] >     {
	I0317 10:51:12.160691  121672 command_runner.go:130] >       "id": "sha256:7fc9d4aa817aa6a3e549f3cd49d1f7b496407be979fc36dd5f356d59ce8c3a82",
	I0317 10:51:12.160695  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160702  121672 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.16-0"
	I0317 10:51:12.160706  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160716  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160724  121672 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5"
	I0317 10:51:12.160730  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160734  121672 command_runner.go:130] >       "size": "67941650",
	I0317 10:51:12.160741  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.160749  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.160754  121672 command_runner.go:130] >       },
	I0317 10:51:12.160758  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.160765  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160776  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160782  121672 command_runner.go:130] >     },
	I0317 10:51:12.160787  121672 command_runner.go:130] >     {
	I0317 10:51:12.160794  121672 command_runner.go:130] >       "id": "sha256:6417e1437b6d9a789e1ca789695a574e1df00a632bdbfbcae9695c9a7d500e32",
	I0317 10:51:12.160802  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160807  121672 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.32.2"
	I0317 10:51:12.160813  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160817  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160830  121672 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f"
	I0317 10:51:12.160836  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160840  121672 command_runner.go:130] >       "size": "26215036",
	I0317 10:51:12.160845  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.160850  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.160856  121672 command_runner.go:130] >       },
	I0317 10:51:12.160860  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.160864  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160868  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160877  121672 command_runner.go:130] >     },
	I0317 10:51:12.160883  121672 command_runner.go:130] >     {
	I0317 10:51:12.160890  121672 command_runner.go:130] >       "id": "sha256:3c9285acfd2ff7915bb451cc40ac060366ac519f3fef00c455f5aca0e0346c4d",
	I0317 10:51:12.160894  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160899  121672 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.32.2"
	I0317 10:51:12.160902  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160906  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.160915  121672 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90"
	I0317 10:51:12.160928  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160933  121672 command_runner.go:130] >       "size": "23968941",
	I0317 10:51:12.160937  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.160940  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.160944  121672 command_runner.go:130] >       },
	I0317 10:51:12.160955  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.160960  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.160964  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.160970  121672 command_runner.go:130] >     },
	I0317 10:51:12.160973  121672 command_runner.go:130] >     {
	I0317 10:51:12.160980  121672 command_runner.go:130] >       "id": "sha256:e5aac5df76d9b8dc899ab8c4db25a7648e7fb25cafe7a155066247883c78f062",
	I0317 10:51:12.160983  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.160989  121672 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.32.2"
	I0317 10:51:12.160992  121672 command_runner.go:130] >       ],
	I0317 10:51:12.160995  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.161004  121672 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d"
	I0317 10:51:12.161009  121672 command_runner.go:130] >       ],
	I0317 10:51:12.161014  121672 command_runner.go:130] >       "size": "27362401",
	I0317 10:51:12.161018  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.161028  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.161034  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.161039  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.161042  121672 command_runner.go:130] >     },
	I0317 10:51:12.161044  121672 command_runner.go:130] >     {
	I0317 10:51:12.161052  121672 command_runner.go:130] >       "id": "sha256:82dfa03f692fb5d84f66c17d6ee9126b081182152b25d28ea456d89b7d5d8911",
	I0317 10:51:12.161062  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.161068  121672 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.32.2"
	I0317 10:51:12.161071  121672 command_runner.go:130] >       ],
	I0317 10:51:12.161075  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.161083  121672 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76"
	I0317 10:51:12.161088  121672 command_runner.go:130] >       ],
	I0317 10:51:12.161092  121672 command_runner.go:130] >       "size": "18921614",
	I0317 10:51:12.161098  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.161102  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.161107  121672 command_runner.go:130] >       },
	I0317 10:51:12.161111  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.161123  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.161127  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.161130  121672 command_runner.go:130] >     },
	I0317 10:51:12.161133  121672 command_runner.go:130] >     {
	I0317 10:51:12.161140  121672 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0317 10:51:12.161147  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.161152  121672 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0317 10:51:12.161156  121672 command_runner.go:130] >       ],
	I0317 10:51:12.161160  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.161168  121672 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0317 10:51:12.161171  121672 command_runner.go:130] >       ],
	I0317 10:51:12.161175  121672 command_runner.go:130] >       "size": "267933",
	I0317 10:51:12.161180  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.161185  121672 command_runner.go:130] >         "value": "65535"
	I0317 10:51:12.161190  121672 command_runner.go:130] >       },
	I0317 10:51:12.161194  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.161200  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.161204  121672 command_runner.go:130] >       "pinned": true
	I0317 10:51:12.161215  121672 command_runner.go:130] >     }
	I0317 10:51:12.161218  121672 command_runner.go:130] >   ]
	I0317 10:51:12.161220  121672 command_runner.go:130] > }
	I0317 10:51:12.164106  121672 containerd.go:627] all images are preloaded for containerd runtime.
	I0317 10:51:12.164131  121672 containerd.go:534] Images already preloaded, skipping extraction
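
The preload check above consumes the `sudo crictl images --output json` dump and concludes every required image is present. A hedged sketch of parsing that output; the struct fields follow the JSON keys visible in the log, not any published crictl Go API:

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// crictlImages models just the fields this check needs from crictl's JSON.
type crictlImages struct {
	Images []struct {
		RepoTags []string `json:"repoTags"`
	} `json:"images"`
}

func main() {
	out, err := exec.Command("sudo", "crictl", "images", "--output", "json").Output()
	if err != nil {
		panic(err)
	}
	var imgs crictlImages
	if err := json.Unmarshal(out, &imgs); err != nil {
		panic(err)
	}
	for _, img := range imgs.Images {
		fmt.Println(img.RepoTags) // e.g. [registry.k8s.io/kube-apiserver:v1.32.2]
	}
}
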
	I0317 10:51:12.164191  121672 ssh_runner.go:195] Run: sudo crictl images --output json
	I0317 10:51:12.198412  121672 command_runner.go:130] > {
	I0317 10:51:12.198436  121672 command_runner.go:130] >   "images": [
	I0317 10:51:12.198441  121672 command_runner.go:130] >     {
	I0317 10:51:12.198450  121672 command_runner.go:130] >       "id": "sha256:e1181ee320546c66f17956a302db1b7899d88a593f116726718851133de588b6",
	I0317 10:51:12.198454  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198460  121672 command_runner.go:130] >         "docker.io/kindest/kindnetd:v20241212-9f82dd49"
	I0317 10:51:12.198464  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198468  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198478  121672 command_runner.go:130] >         "docker.io/kindest/kindnetd@sha256:56ea59f77258052c4506076525318ffa66817500f68e94a50fdf7d600a280d26"
	I0317 10:51:12.198484  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198488  121672 command_runner.go:130] >       "size": "35679862",
	I0317 10:51:12.198501  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.198507  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.198511  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198522  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198525  121672 command_runner.go:130] >     },
	I0317 10:51:12.198528  121672 command_runner.go:130] >     {
	I0317 10:51:12.198537  121672 command_runner.go:130] >       "id": "sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6",
	I0317 10:51:12.198544  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198550  121672 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner:v5"
	I0317 10:51:12.198554  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198558  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198581  121672 command_runner.go:130] >         "gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"
	I0317 10:51:12.198585  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198589  121672 command_runner.go:130] >       "size": "8034419",
	I0317 10:51:12.198598  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.198602  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.198605  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198610  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198615  121672 command_runner.go:130] >     },
	I0317 10:51:12.198619  121672 command_runner.go:130] >     {
	I0317 10:51:12.198631  121672 command_runner.go:130] >       "id": "sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4",
	I0317 10:51:12.198635  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198650  121672 command_runner.go:130] >         "registry.k8s.io/coredns/coredns:v1.11.3"
	I0317 10:51:12.198654  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198660  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198668  121672 command_runner.go:130] >         "registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"
	I0317 10:51:12.198674  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198679  121672 command_runner.go:130] >       "size": "16948420",
	I0317 10:51:12.198682  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.198687  121672 command_runner.go:130] >       "username": "nonroot",
	I0317 10:51:12.198693  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198697  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198700  121672 command_runner.go:130] >     },
	I0317 10:51:12.198705  121672 command_runner.go:130] >     {
	I0317 10:51:12.198712  121672 command_runner.go:130] >       "id": "sha256:7fc9d4aa817aa6a3e549f3cd49d1f7b496407be979fc36dd5f356d59ce8c3a82",
	I0317 10:51:12.198719  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198725  121672 command_runner.go:130] >         "registry.k8s.io/etcd:3.5.16-0"
	I0317 10:51:12.198728  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198733  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198746  121672 command_runner.go:130] >         "registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5"
	I0317 10:51:12.198750  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198759  121672 command_runner.go:130] >       "size": "67941650",
	I0317 10:51:12.198763  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.198767  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.198774  121672 command_runner.go:130] >       },
	I0317 10:51:12.198783  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.198788  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198792  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198798  121672 command_runner.go:130] >     },
	I0317 10:51:12.198801  121672 command_runner.go:130] >     {
	I0317 10:51:12.198808  121672 command_runner.go:130] >       "id": "sha256:6417e1437b6d9a789e1ca789695a574e1df00a632bdbfbcae9695c9a7d500e32",
	I0317 10:51:12.198814  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198819  121672 command_runner.go:130] >         "registry.k8s.io/kube-apiserver:v1.32.2"
	I0317 10:51:12.198823  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198829  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198842  121672 command_runner.go:130] >         "registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f"
	I0317 10:51:12.198849  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198853  121672 command_runner.go:130] >       "size": "26215036",
	I0317 10:51:12.198856  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.198860  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.198884  121672 command_runner.go:130] >       },
	I0317 10:51:12.198891  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.198896  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198899  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198903  121672 command_runner.go:130] >     },
	I0317 10:51:12.198906  121672 command_runner.go:130] >     {
	I0317 10:51:12.198913  121672 command_runner.go:130] >       "id": "sha256:3c9285acfd2ff7915bb451cc40ac060366ac519f3fef00c455f5aca0e0346c4d",
	I0317 10:51:12.198920  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.198926  121672 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager:v1.32.2"
	I0317 10:51:12.198930  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198934  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.198945  121672 command_runner.go:130] >         "registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90"
	I0317 10:51:12.198950  121672 command_runner.go:130] >       ],
	I0317 10:51:12.198954  121672 command_runner.go:130] >       "size": "23968941",
	I0317 10:51:12.198958  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.198961  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.198966  121672 command_runner.go:130] >       },
	I0317 10:51:12.198976  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.198984  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.198988  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.198992  121672 command_runner.go:130] >     },
	I0317 10:51:12.198997  121672 command_runner.go:130] >     {
	I0317 10:51:12.199004  121672 command_runner.go:130] >       "id": "sha256:e5aac5df76d9b8dc899ab8c4db25a7648e7fb25cafe7a155066247883c78f062",
	I0317 10:51:12.199011  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.199017  121672 command_runner.go:130] >         "registry.k8s.io/kube-proxy:v1.32.2"
	I0317 10:51:12.199021  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199025  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.199035  121672 command_runner.go:130] >         "registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d"
	I0317 10:51:12.199041  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199045  121672 command_runner.go:130] >       "size": "27362401",
	I0317 10:51:12.199048  121672 command_runner.go:130] >       "uid": null,
	I0317 10:51:12.199052  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.199056  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.199065  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.199068  121672 command_runner.go:130] >     },
	I0317 10:51:12.199071  121672 command_runner.go:130] >     {
	I0317 10:51:12.199078  121672 command_runner.go:130] >       "id": "sha256:82dfa03f692fb5d84f66c17d6ee9126b081182152b25d28ea456d89b7d5d8911",
	I0317 10:51:12.199086  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.199091  121672 command_runner.go:130] >         "registry.k8s.io/kube-scheduler:v1.32.2"
	I0317 10:51:12.199094  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199103  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.199112  121672 command_runner.go:130] >         "registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76"
	I0317 10:51:12.199118  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199122  121672 command_runner.go:130] >       "size": "18921614",
	I0317 10:51:12.199126  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.199130  121672 command_runner.go:130] >         "value": "0"
	I0317 10:51:12.199133  121672 command_runner.go:130] >       },
	I0317 10:51:12.199140  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.199145  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.199151  121672 command_runner.go:130] >       "pinned": false
	I0317 10:51:12.199159  121672 command_runner.go:130] >     },
	I0317 10:51:12.199165  121672 command_runner.go:130] >     {
	I0317 10:51:12.199172  121672 command_runner.go:130] >       "id": "sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8",
	I0317 10:51:12.199177  121672 command_runner.go:130] >       "repoTags": [
	I0317 10:51:12.199184  121672 command_runner.go:130] >         "registry.k8s.io/pause:3.10"
	I0317 10:51:12.199187  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199191  121672 command_runner.go:130] >       "repoDigests": [
	I0317 10:51:12.199201  121672 command_runner.go:130] >         "registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"
	I0317 10:51:12.199208  121672 command_runner.go:130] >       ],
	I0317 10:51:12.199213  121672 command_runner.go:130] >       "size": "267933",
	I0317 10:51:12.199216  121672 command_runner.go:130] >       "uid": {
	I0317 10:51:12.199221  121672 command_runner.go:130] >         "value": "65535"
	I0317 10:51:12.199225  121672 command_runner.go:130] >       },
	I0317 10:51:12.199231  121672 command_runner.go:130] >       "username": "",
	I0317 10:51:12.199235  121672 command_runner.go:130] >       "spec": null,
	I0317 10:51:12.199240  121672 command_runner.go:130] >       "pinned": true
	I0317 10:51:12.199244  121672 command_runner.go:130] >     }
	I0317 10:51:12.199247  121672 command_runner.go:130] >   ]
	I0317 10:51:12.199250  121672 command_runner.go:130] > }
	I0317 10:51:12.199360  121672 containerd.go:627] all images are preloaded for containerd runtime.
	I0317 10:51:12.199372  121672 cache_images.go:84] Images are preloaded, skipping loading
	I0317 10:51:12.199380  121672 kubeadm.go:934] updating node { 192.168.67.2 8443 v1.32.2 containerd true true} ...
	I0317 10:51:12.199497  121672 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-286863 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
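
The kubelet unit above is rendered from the cluster config, substituting the node name, IP, and Kubernetes version into the ExecStart line. A sketch of that substitution with text/template, trimmed to the flags visible in this log (the template string is illustrative, not minikube's actual template):

package main

import (
	"os"
	"text/template"
)

const unit = `[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --hostname-override={{.Node}} --node-ip={{.IP}}
`

func main() {
	t := template.Must(template.New("kubelet").Parse(unit))
	_ = t.Execute(os.Stdout, struct{ Version, Node, IP string }{
		"v1.32.2", "multinode-286863", "192.168.67.2",
	})
}
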
	I0317 10:51:12.199577  121672 ssh_runner.go:195] Run: sudo crictl info
	I0317 10:51:12.239195  121672 command_runner.go:130] > {
	I0317 10:51:12.239220  121672 command_runner.go:130] >   "status": {
	I0317 10:51:12.239232  121672 command_runner.go:130] >     "conditions": [
	I0317 10:51:12.239236  121672 command_runner.go:130] >       {
	I0317 10:51:12.239244  121672 command_runner.go:130] >         "type": "RuntimeReady",
	I0317 10:51:12.239249  121672 command_runner.go:130] >         "status": true,
	I0317 10:51:12.239253  121672 command_runner.go:130] >         "reason": "",
	I0317 10:51:12.239257  121672 command_runner.go:130] >         "message": ""
	I0317 10:51:12.239262  121672 command_runner.go:130] >       },
	I0317 10:51:12.239274  121672 command_runner.go:130] >       {
	I0317 10:51:12.239284  121672 command_runner.go:130] >         "type": "NetworkReady",
	I0317 10:51:12.239288  121672 command_runner.go:130] >         "status": true,
	I0317 10:51:12.239296  121672 command_runner.go:130] >         "reason": "",
	I0317 10:51:12.239303  121672 command_runner.go:130] >         "message": ""
	I0317 10:51:12.239310  121672 command_runner.go:130] >       },
	I0317 10:51:12.239313  121672 command_runner.go:130] >       {
	I0317 10:51:12.239318  121672 command_runner.go:130] >         "type": "ContainerdHasNoDeprecationWarnings",
	I0317 10:51:12.239326  121672 command_runner.go:130] >         "status": true,
	I0317 10:51:12.239330  121672 command_runner.go:130] >         "reason": "",
	I0317 10:51:12.239337  121672 command_runner.go:130] >         "message": ""
	I0317 10:51:12.239340  121672 command_runner.go:130] >       }
	I0317 10:51:12.239345  121672 command_runner.go:130] >     ]
	I0317 10:51:12.239351  121672 command_runner.go:130] >   },
	I0317 10:51:12.239354  121672 command_runner.go:130] >   "cniconfig": {
	I0317 10:51:12.239360  121672 command_runner.go:130] >     "PluginDirs": [
	I0317 10:51:12.239365  121672 command_runner.go:130] >       "/opt/cni/bin"
	I0317 10:51:12.239371  121672 command_runner.go:130] >     ],
	I0317 10:51:12.239379  121672 command_runner.go:130] >     "PluginConfDir": "/etc/cni/net.d",
	I0317 10:51:12.239387  121672 command_runner.go:130] >     "PluginMaxConfNum": 1,
	I0317 10:51:12.239391  121672 command_runner.go:130] >     "Prefix": "eth",
	I0317 10:51:12.239399  121672 command_runner.go:130] >     "Networks": [
	I0317 10:51:12.239403  121672 command_runner.go:130] >       {
	I0317 10:51:12.239411  121672 command_runner.go:130] >         "Config": {
	I0317 10:51:12.239416  121672 command_runner.go:130] >           "Name": "cni-loopback",
	I0317 10:51:12.239430  121672 command_runner.go:130] >           "CNIVersion": "0.3.1",
	I0317 10:51:12.239435  121672 command_runner.go:130] >           "Plugins": [
	I0317 10:51:12.239441  121672 command_runner.go:130] >             {
	I0317 10:51:12.239445  121672 command_runner.go:130] >               "Network": {
	I0317 10:51:12.239451  121672 command_runner.go:130] >                 "type": "loopback",
	I0317 10:51:12.239460  121672 command_runner.go:130] >                 "ipam": {},
	I0317 10:51:12.239464  121672 command_runner.go:130] >                 "dns": {}
	I0317 10:51:12.239471  121672 command_runner.go:130] >               },
	I0317 10:51:12.239480  121672 command_runner.go:130] >               "Source": "{\"type\":\"loopback\"}"
	I0317 10:51:12.239487  121672 command_runner.go:130] >             }
	I0317 10:51:12.239490  121672 command_runner.go:130] >           ],
	I0317 10:51:12.239506  121672 command_runner.go:130] >           "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
	I0317 10:51:12.239514  121672 command_runner.go:130] >         },
	I0317 10:51:12.239518  121672 command_runner.go:130] >         "IFName": "lo"
	I0317 10:51:12.239521  121672 command_runner.go:130] >       },
	I0317 10:51:12.239524  121672 command_runner.go:130] >       {
	I0317 10:51:12.239528  121672 command_runner.go:130] >         "Config": {
	I0317 10:51:12.239533  121672 command_runner.go:130] >           "Name": "loopback",
	I0317 10:51:12.239539  121672 command_runner.go:130] >           "CNIVersion": "1.0.0",
	I0317 10:51:12.239543  121672 command_runner.go:130] >           "Plugins": [
	I0317 10:51:12.239548  121672 command_runner.go:130] >             {
	I0317 10:51:12.239552  121672 command_runner.go:130] >               "Network": {
	I0317 10:51:12.239560  121672 command_runner.go:130] >                 "cniVersion": "1.0.0",
	I0317 10:51:12.239567  121672 command_runner.go:130] >                 "name": "loopback",
	I0317 10:51:12.239575  121672 command_runner.go:130] >                 "type": "loopback",
	I0317 10:51:12.239580  121672 command_runner.go:130] >                 "ipam": {},
	I0317 10:51:12.239589  121672 command_runner.go:130] >                 "dns": {}
	I0317 10:51:12.239598  121672 command_runner.go:130] >               },
	I0317 10:51:12.239606  121672 command_runner.go:130] >               "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}"
	I0317 10:51:12.239613  121672 command_runner.go:130] >             }
	I0317 10:51:12.239617  121672 command_runner.go:130] >           ],
	I0317 10:51:12.239627  121672 command_runner.go:130] >           "Source": "{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"plugins\":[{\"cniVersion\":\"1.0.0\",\"name\":\"loopback\",\"type\":\"loopback\"}]}"
	I0317 10:51:12.239634  121672 command_runner.go:130] >         },
	I0317 10:51:12.239638  121672 command_runner.go:130] >         "IFName": "eth0"
	I0317 10:51:12.239641  121672 command_runner.go:130] >       }
	I0317 10:51:12.239644  121672 command_runner.go:130] >     ]
	I0317 10:51:12.239647  121672 command_runner.go:130] >   },
	I0317 10:51:12.239651  121672 command_runner.go:130] >   "config": {
	I0317 10:51:12.239657  121672 command_runner.go:130] >     "containerd": {
	I0317 10:51:12.239661  121672 command_runner.go:130] >       "snapshotter": "overlayfs",
	I0317 10:51:12.239669  121672 command_runner.go:130] >       "defaultRuntimeName": "runc",
	I0317 10:51:12.239679  121672 command_runner.go:130] >       "defaultRuntime": {
	I0317 10:51:12.239687  121672 command_runner.go:130] >         "runtimeType": "",
	I0317 10:51:12.239692  121672 command_runner.go:130] >         "runtimePath": "",
	I0317 10:51:12.239699  121672 command_runner.go:130] >         "runtimeEngine": "",
	I0317 10:51:12.239704  121672 command_runner.go:130] >         "PodAnnotations": null,
	I0317 10:51:12.239717  121672 command_runner.go:130] >         "ContainerAnnotations": null,
	I0317 10:51:12.239724  121672 command_runner.go:130] >         "runtimeRoot": "",
	I0317 10:51:12.239729  121672 command_runner.go:130] >         "options": null,
	I0317 10:51:12.239734  121672 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0317 10:51:12.239745  121672 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0317 10:51:12.239750  121672 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0317 10:51:12.239755  121672 command_runner.go:130] >         "cniConfDir": "",
	I0317 10:51:12.239760  121672 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0317 10:51:12.239767  121672 command_runner.go:130] >         "snapshotter": "",
	I0317 10:51:12.239772  121672 command_runner.go:130] >         "sandboxMode": ""
	I0317 10:51:12.239778  121672 command_runner.go:130] >       },
	I0317 10:51:12.239783  121672 command_runner.go:130] >       "untrustedWorkloadRuntime": {
	I0317 10:51:12.239790  121672 command_runner.go:130] >         "runtimeType": "",
	I0317 10:51:12.239795  121672 command_runner.go:130] >         "runtimePath": "",
	I0317 10:51:12.239816  121672 command_runner.go:130] >         "runtimeEngine": "",
	I0317 10:51:12.239825  121672 command_runner.go:130] >         "PodAnnotations": null,
	I0317 10:51:12.239830  121672 command_runner.go:130] >         "ContainerAnnotations": null,
	I0317 10:51:12.239839  121672 command_runner.go:130] >         "runtimeRoot": "",
	I0317 10:51:12.239843  121672 command_runner.go:130] >         "options": null,
	I0317 10:51:12.239851  121672 command_runner.go:130] >         "privileged_without_host_devices": false,
	I0317 10:51:12.239861  121672 command_runner.go:130] >         "privileged_without_host_devices_all_devices_allowed": false,
	I0317 10:51:12.239870  121672 command_runner.go:130] >         "baseRuntimeSpec": "",
	I0317 10:51:12.239879  121672 command_runner.go:130] >         "cniConfDir": "",
	I0317 10:51:12.239883  121672 command_runner.go:130] >         "cniMaxConfNum": 0,
	I0317 10:51:12.239890  121672 command_runner.go:130] >         "snapshotter": "",
	I0317 10:51:12.239895  121672 command_runner.go:130] >         "sandboxMode": ""
	I0317 10:51:12.239901  121672 command_runner.go:130] >       },
	I0317 10:51:12.239907  121672 command_runner.go:130] >       "runtimes": {
	I0317 10:51:12.239918  121672 command_runner.go:130] >         "runc": {
	I0317 10:51:12.239924  121672 command_runner.go:130] >           "runtimeType": "io.containerd.runc.v2",
	I0317 10:51:12.239932  121672 command_runner.go:130] >           "runtimePath": "",
	I0317 10:51:12.239937  121672 command_runner.go:130] >           "runtimeEngine": "",
	I0317 10:51:12.239943  121672 command_runner.go:130] >           "PodAnnotations": null,
	I0317 10:51:12.239948  121672 command_runner.go:130] >           "ContainerAnnotations": null,
	I0317 10:51:12.239955  121672 command_runner.go:130] >           "runtimeRoot": "",
	I0317 10:51:12.239960  121672 command_runner.go:130] >           "options": {
	I0317 10:51:12.239967  121672 command_runner.go:130] >             "SystemdCgroup": false
	I0317 10:51:12.239971  121672 command_runner.go:130] >           },
	I0317 10:51:12.239979  121672 command_runner.go:130] >           "privileged_without_host_devices": false,
	I0317 10:51:12.239985  121672 command_runner.go:130] >           "privileged_without_host_devices_all_devices_allowed": false,
	I0317 10:51:12.239993  121672 command_runner.go:130] >           "baseRuntimeSpec": "",
	I0317 10:51:12.239998  121672 command_runner.go:130] >           "cniConfDir": "",
	I0317 10:51:12.240005  121672 command_runner.go:130] >           "cniMaxConfNum": 0,
	I0317 10:51:12.240010  121672 command_runner.go:130] >           "snapshotter": "",
	I0317 10:51:12.240015  121672 command_runner.go:130] >           "sandboxMode": "podsandbox"
	I0317 10:51:12.240019  121672 command_runner.go:130] >         }
	I0317 10:51:12.240023  121672 command_runner.go:130] >       },
	I0317 10:51:12.240029  121672 command_runner.go:130] >       "noPivot": false,
	I0317 10:51:12.240034  121672 command_runner.go:130] >       "disableSnapshotAnnotations": true,
	I0317 10:51:12.240042  121672 command_runner.go:130] >       "discardUnpackedLayers": true,
	I0317 10:51:12.240047  121672 command_runner.go:130] >       "ignoreBlockIONotEnabledErrors": false,
	I0317 10:51:12.240055  121672 command_runner.go:130] >       "ignoreRdtNotEnabledErrors": false
	I0317 10:51:12.240058  121672 command_runner.go:130] >     },
	I0317 10:51:12.240062  121672 command_runner.go:130] >     "cni": {
	I0317 10:51:12.240067  121672 command_runner.go:130] >       "binDir": "/opt/cni/bin",
	I0317 10:51:12.240075  121672 command_runner.go:130] >       "confDir": "/etc/cni/net.d",
	I0317 10:51:12.240079  121672 command_runner.go:130] >       "maxConfNum": 1,
	I0317 10:51:12.240087  121672 command_runner.go:130] >       "setupSerially": false,
	I0317 10:51:12.240091  121672 command_runner.go:130] >       "confTemplate": "",
	I0317 10:51:12.240098  121672 command_runner.go:130] >       "ipPref": ""
	I0317 10:51:12.240102  121672 command_runner.go:130] >     },
	I0317 10:51:12.240109  121672 command_runner.go:130] >     "registry": {
	I0317 10:51:12.240117  121672 command_runner.go:130] >       "configPath": "/etc/containerd/certs.d",
	I0317 10:51:12.240121  121672 command_runner.go:130] >       "mirrors": null,
	I0317 10:51:12.240128  121672 command_runner.go:130] >       "configs": null,
	I0317 10:51:12.240133  121672 command_runner.go:130] >       "auths": null,
	I0317 10:51:12.240140  121672 command_runner.go:130] >       "headers": null
	I0317 10:51:12.240144  121672 command_runner.go:130] >     },
	I0317 10:51:12.240152  121672 command_runner.go:130] >     "imageDecryption": {
	I0317 10:51:12.240169  121672 command_runner.go:130] >       "keyModel": "node"
	I0317 10:51:12.240176  121672 command_runner.go:130] >     },
	I0317 10:51:12.240181  121672 command_runner.go:130] >     "disableTCPService": true,
	I0317 10:51:12.240185  121672 command_runner.go:130] >     "streamServerAddress": "",
	I0317 10:51:12.240191  121672 command_runner.go:130] >     "streamServerPort": "10010",
	I0317 10:51:12.240195  121672 command_runner.go:130] >     "streamIdleTimeout": "4h0m0s",
	I0317 10:51:12.240205  121672 command_runner.go:130] >     "enableSelinux": false,
	I0317 10:51:12.240210  121672 command_runner.go:130] >     "selinuxCategoryRange": 1024,
	I0317 10:51:12.240221  121672 command_runner.go:130] >     "sandboxImage": "registry.k8s.io/pause:3.10",
	I0317 10:51:12.240229  121672 command_runner.go:130] >     "statsCollectPeriod": 10,
	I0317 10:51:12.240233  121672 command_runner.go:130] >     "systemdCgroup": false,
	I0317 10:51:12.240241  121672 command_runner.go:130] >     "enableTLSStreaming": false,
	I0317 10:51:12.240245  121672 command_runner.go:130] >     "x509KeyPairStreaming": {
	I0317 10:51:12.240253  121672 command_runner.go:130] >       "tlsCertFile": "",
	I0317 10:51:12.240258  121672 command_runner.go:130] >       "tlsKeyFile": ""
	I0317 10:51:12.240264  121672 command_runner.go:130] >     },
	I0317 10:51:12.240269  121672 command_runner.go:130] >     "maxContainerLogSize": 16384,
	I0317 10:51:12.240273  121672 command_runner.go:130] >     "disableCgroup": false,
	I0317 10:51:12.240279  121672 command_runner.go:130] >     "disableApparmor": false,
	I0317 10:51:12.240284  121672 command_runner.go:130] >     "restrictOOMScoreAdj": false,
	I0317 10:51:12.240292  121672 command_runner.go:130] >     "maxConcurrentDownloads": 3,
	I0317 10:51:12.240297  121672 command_runner.go:130] >     "disableProcMount": false,
	I0317 10:51:12.240305  121672 command_runner.go:130] >     "unsetSeccompProfile": "",
	I0317 10:51:12.240310  121672 command_runner.go:130] >     "tolerateMissingHugetlbController": true,
	I0317 10:51:12.240318  121672 command_runner.go:130] >     "disableHugetlbController": true,
	I0317 10:51:12.240327  121672 command_runner.go:130] >     "device_ownership_from_security_context": false,
	I0317 10:51:12.240336  121672 command_runner.go:130] >     "ignoreImageDefinedVolumes": false,
	I0317 10:51:12.240341  121672 command_runner.go:130] >     "netnsMountsUnderStateDir": false,
	I0317 10:51:12.240349  121672 command_runner.go:130] >     "enableUnprivilegedPorts": true,
	I0317 10:51:12.240354  121672 command_runner.go:130] >     "enableUnprivilegedICMP": false,
	I0317 10:51:12.240362  121672 command_runner.go:130] >     "enableCDI": false,
	I0317 10:51:12.240365  121672 command_runner.go:130] >     "cdiSpecDirs": [
	I0317 10:51:12.240369  121672 command_runner.go:130] >       "/etc/cdi",
	I0317 10:51:12.240375  121672 command_runner.go:130] >       "/var/run/cdi"
	I0317 10:51:12.240378  121672 command_runner.go:130] >     ],
	I0317 10:51:12.240383  121672 command_runner.go:130] >     "imagePullProgressTimeout": "5m0s",
	I0317 10:51:12.240391  121672 command_runner.go:130] >     "drainExecSyncIOTimeout": "0s",
	I0317 10:51:12.240396  121672 command_runner.go:130] >     "imagePullWithSyncFs": false,
	I0317 10:51:12.240404  121672 command_runner.go:130] >     "ignoreDeprecationWarnings": null,
	I0317 10:51:12.240409  121672 command_runner.go:130] >     "containerdRootDir": "/var/lib/containerd",
	I0317 10:51:12.240418  121672 command_runner.go:130] >     "containerdEndpoint": "/run/containerd/containerd.sock",
	I0317 10:51:12.240429  121672 command_runner.go:130] >     "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
	I0317 10:51:12.240439  121672 command_runner.go:130] >     "stateDir": "/run/containerd/io.containerd.grpc.v1.cri"
	I0317 10:51:12.240446  121672 command_runner.go:130] >   },
	I0317 10:51:12.240450  121672 command_runner.go:130] >   "golang": "go1.22.10",
	I0317 10:51:12.240454  121672 command_runner.go:130] >   "lastCNILoadStatus": "OK",
	I0317 10:51:12.240461  121672 command_runner.go:130] >   "lastCNILoadStatus.default": "OK"
	I0317 10:51:12.240469  121672 command_runner.go:130] > }
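	(Note: the JSON dump above is the CRI runtime status and configuration that containerd reports; the top-level "config", "golang" and "lastCNILoadStatus" keys match the shape of 'crictl info' output. To spot-check the effective CNI settings on the node yourself, a minimal sketch, assuming crictl and jq are available inside the minikube node:
	
		$ minikube ssh -p multinode-286863
		$ sudo crictl info | jq '.config.cni'
		{
		  "binDir": "/opt/cni/bin",
		  "confDir": "/etc/cni/net.d",
		  ...
		}
	
	The binDir/confDir values shown correspond to the "cni" block in the dump above.)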
	I0317 10:51:12.242958  121672 cni.go:84] Creating CNI manager for ""
	I0317 10:51:12.242980  121672 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0317 10:51:12.242989  121672 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0317 10:51:12.243010  121672 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.67.2 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-286863 NodeName:multinode-286863 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.67.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.67.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///run/containerd/containerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0317 10:51:12.243134  121672 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.67.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///run/containerd/containerd.sock
	  name: "multinode-286863"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "192.168.67.2"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.67.2"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///run/containerd/containerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
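	(The rendered kubeadm config above gets written to /var/tmp/minikube/kubeadm.yaml.new, 2306 bytes per the scp line below, before kubeadm consumes it. When debugging a config like this by hand, recent kubeadm releases can check it offline; a hedged sketch:
	
		$ sudo kubeadm config validate --config /var/tmp/minikube/kubeadm.yaml
	
	'kubeadm config validate' landed around kubeadm v1.26; on older releases, 'kubeadm init --config /var/tmp/minikube/kubeadm.yaml --dry-run' is the nearest equivalent.)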
	
	I0317 10:51:12.243236  121672 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0317 10:51:12.251077  121672 command_runner.go:130] > kubeadm
	I0317 10:51:12.251163  121672 command_runner.go:130] > kubectl
	I0317 10:51:12.251173  121672 command_runner.go:130] > kubelet
	I0317 10:51:12.252142  121672 binaries.go:44] Found k8s binaries, skipping transfer
	I0317 10:51:12.252226  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0317 10:51:12.260768  121672 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (320 bytes)
	I0317 10:51:12.278603  121672 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0317 10:51:12.296297  121672 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2306 bytes)
	I0317 10:51:12.314047  121672 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0317 10:51:12.317286  121672 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
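	(The one-liner above pins control-plane.minikube.internal to the node IP: it copies /etc/hosts minus any stale entry for that host into a temp file, appends the fresh mapping, and only then uses sudo to copy the result back; root is needed just for the final write. Unrolled into separate steps, the same edit reads:
	
		grep -v $'\tcontrol-plane.minikube.internal$' /etc/hosts > /tmp/hosts.new
		printf '192.168.67.2\tcontrol-plane.minikube.internal\n' >> /tmp/hosts.new
		sudo cp /tmp/hosts.new /etc/hosts
	)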
	I0317 10:51:12.327709  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:51:12.406163  121672 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0317 10:51:12.421662  121672 certs.go:68] Setting up /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863 for IP: 192.168.67.2
	I0317 10:51:12.421684  121672 certs.go:194] generating shared ca certs ...
	I0317 10:51:12.421743  121672 certs.go:226] acquiring lock for ca certs: {Name:mk5a5307154bd473cdb748bc6e62d2139b42123a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:12.421929  121672 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key
	I0317 10:51:12.421994  121672 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key
	I0317 10:51:12.422008  121672 certs.go:256] generating profile certs ...
	I0317 10:51:12.422082  121672 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key
	I0317 10:51:12.422102  121672 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt with IP's: []
	I0317 10:51:12.976352  121672 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt ...
	I0317 10:51:12.976385  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt: {Name:mkba022c669331ddcb684bd7807006611a677602 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:12.976612  121672 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key ...
	I0317 10:51:12.976626  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key: {Name:mka3698cf07d670b0641d81fda6fe3226facbf54 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:12.976717  121672 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key.8d0a2d6f
	I0317 10:51:12.976734  121672 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt.8d0a2d6f with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.67.2]
	I0317 10:51:13.306408  121672 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt.8d0a2d6f ...
	I0317 10:51:13.306486  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt.8d0a2d6f: {Name:mk81b699e6b2963cb5585d89de7d836bc8faffad Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:13.306730  121672 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key.8d0a2d6f ...
	I0317 10:51:13.306765  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key.8d0a2d6f: {Name:mk90cff672bf0848ac869ad5c64bc16eefc43982 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:13.306927  121672 certs.go:381] copying /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt.8d0a2d6f -> /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt
	I0317 10:51:13.307023  121672 certs.go:385] copying /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key.8d0a2d6f -> /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key
	I0317 10:51:13.307086  121672 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.key
	I0317 10:51:13.307107  121672 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.crt with IP's: []
	I0317 10:51:13.461212  121672 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.crt ...
	I0317 10:51:13.461247  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.crt: {Name:mkd74da7f6de66137734473780281f8ce0e464fb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:13.462067  121672 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.key ...
	I0317 10:51:13.462083  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.key: {Name:mk7e82009d19837ad9f96c66258d8630b2b4c847 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
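	(At this point the profile holds a client cert, an apiserver serving cert requested above with SANs 10.96.0.1, 127.0.0.1, 10.0.0.1 and 192.168.67.2, and an aggregator proxy-client cert. One way to confirm the SANs landed, sketched with plain openssl; DNS SANs for the usual kubernetes service names are typically present alongside the IPs:
	
		$ openssl x509 -noout -text \
		    -in /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt \
		  | grep -A1 'Subject Alternative Name'
	)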
	I0317 10:51:13.462172  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0317 10:51:13.462193  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0317 10:51:13.462205  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0317 10:51:13.462223  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0317 10:51:13.462242  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0317 10:51:13.462257  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0317 10:51:13.462274  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0317 10:51:13.462287  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0317 10:51:13.462348  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem (1338 bytes)
	W0317 10:51:13.462387  121672 certs.go:480] ignoring /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572_empty.pem, impossibly tiny 0 bytes
	I0317 10:51:13.462399  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem (1675 bytes)
	I0317 10:51:13.462423  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem (1078 bytes)
	I0317 10:51:13.462450  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem (1123 bytes)
	I0317 10:51:13.462477  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem (1679 bytes)
	I0317 10:51:13.462522  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem (1708 bytes)
	I0317 10:51:13.462556  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:51:13.462573  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem -> /usr/share/ca-certificates/7572.pem
	I0317 10:51:13.462586  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> /usr/share/ca-certificates/75722.pem
	I0317 10:51:13.463168  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0317 10:51:13.489215  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0317 10:51:13.514120  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0317 10:51:13.538237  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0317 10:51:13.561328  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0317 10:51:13.585948  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0317 10:51:13.611047  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0317 10:51:13.635120  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0317 10:51:13.659603  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0317 10:51:13.684584  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem --> /usr/share/ca-certificates/7572.pem (1338 bytes)
	I0317 10:51:13.708842  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /usr/share/ca-certificates/75722.pem (1708 bytes)
	I0317 10:51:13.732440  121672 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0317 10:51:13.750552  121672 ssh_runner.go:195] Run: openssl version
	I0317 10:51:13.755714  121672 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0317 10:51:13.756154  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0317 10:51:13.765601  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:51:13.769294  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Mar 17 10:26 /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:51:13.769317  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Mar 17 10:26 /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:51:13.769372  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:51:13.776138  121672 command_runner.go:130] > b5213941
	I0317 10:51:13.776553  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0317 10:51:13.785920  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7572.pem && ln -fs /usr/share/ca-certificates/7572.pem /etc/ssl/certs/7572.pem"
	I0317 10:51:13.795459  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7572.pem
	I0317 10:51:13.799006  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Mar 17 10:33 /usr/share/ca-certificates/7572.pem
	I0317 10:51:13.799033  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Mar 17 10:33 /usr/share/ca-certificates/7572.pem
	I0317 10:51:13.799084  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7572.pem
	I0317 10:51:13.805656  121672 command_runner.go:130] > 51391683
	I0317 10:51:13.806109  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7572.pem /etc/ssl/certs/51391683.0"
	I0317 10:51:13.815386  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/75722.pem && ln -fs /usr/share/ca-certificates/75722.pem /etc/ssl/certs/75722.pem"
	I0317 10:51:13.824705  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/75722.pem
	I0317 10:51:13.828500  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Mar 17 10:33 /usr/share/ca-certificates/75722.pem
	I0317 10:51:13.828579  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Mar 17 10:33 /usr/share/ca-certificates/75722.pem
	I0317 10:51:13.828652  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/75722.pem
	I0317 10:51:13.835380  121672 command_runner.go:130] > 3ec20f2e
	I0317 10:51:13.835779  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/75722.pem /etc/ssl/certs/3ec20f2e.0"
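	(The three install passes above follow the standard OpenSSL trust-store layout: place the PEM under /usr/share/ca-certificates, compute its subject hash, and symlink <hash>.0 under /etc/ssl/certs so library lookups resolve it. Done by hand for the minikube CA, the same steps look like:
	
		sudo ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem
		HASH=$(openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem)  # b5213941 for this CA, per the log
		sudo ln -fs /etc/ssl/certs/minikubeCA.pem "/etc/ssl/certs/${HASH}.0"
	)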
	I0317 10:51:13.845328  121672 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0317 10:51:13.848621  121672 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0317 10:51:13.848655  121672 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0317 10:51:13.848696  121672 kubeadm.go:392] StartCluster: {Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:51:13.848766  121672 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0317 10:51:13.848845  121672 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0317 10:51:13.885386  121672 cri.go:89] found id: ""
	I0317 10:51:13.885498  121672 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0317 10:51:13.894391  121672 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0317 10:51:13.894418  121672 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0317 10:51:13.894425  121672 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0317 10:51:13.894494  121672 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0317 10:51:13.903596  121672 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0317 10:51:13.903662  121672 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0317 10:51:13.914533  121672 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0317 10:51:13.914558  121672 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0317 10:51:13.914567  121672 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0317 10:51:13.914575  121672 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0317 10:51:13.914611  121672 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0317 10:51:13.914626  121672 kubeadm.go:157] found existing configuration files:
	
	I0317 10:51:13.914695  121672 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0317 10:51:13.923891  121672 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0317 10:51:13.923983  121672 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0317 10:51:13.924044  121672 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0317 10:51:13.933097  121672 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0317 10:51:13.941070  121672 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0317 10:51:13.942041  121672 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0317 10:51:13.942102  121672 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0317 10:51:13.951875  121672 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0317 10:51:13.961228  121672 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0317 10:51:13.961363  121672 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0317 10:51:13.961430  121672 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0317 10:51:13.970950  121672 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0317 10:51:13.980213  121672 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0317 10:51:13.980302  121672 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0317 10:51:13.980379  121672 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0317 10:51:13.988956  121672 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0317 10:51:14.046266  121672 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0317 10:51:14.046339  121672 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0317 10:51:14.046543  121672 kubeadm.go:310] [preflight] Running pre-flight checks
	I0317 10:51:14.046579  121672 command_runner.go:130] > [preflight] Running pre-flight checks
	I0317 10:51:14.072627  121672 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0317 10:51:14.072707  121672 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0317 10:51:14.072806  121672 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1077-aws
	I0317 10:51:14.072846  121672 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1077-aws
	I0317 10:51:14.072913  121672 command_runner.go:130] > OS: Linux
	I0317 10:51:14.072939  121672 kubeadm.go:310] OS: Linux
	I0317 10:51:14.073017  121672 command_runner.go:130] > CGROUPS_CPU: enabled
	I0317 10:51:14.073049  121672 kubeadm.go:310] CGROUPS_CPU: enabled
	I0317 10:51:14.073133  121672 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0317 10:51:14.073183  121672 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0317 10:51:14.073288  121672 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0317 10:51:14.073310  121672 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0317 10:51:14.073389  121672 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0317 10:51:14.073412  121672 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0317 10:51:14.073504  121672 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0317 10:51:14.073527  121672 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0317 10:51:14.073607  121672 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0317 10:51:14.073630  121672 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0317 10:51:14.073720  121672 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0317 10:51:14.073744  121672 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0317 10:51:14.073825  121672 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0317 10:51:14.073847  121672 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0317 10:51:14.073937  121672 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0317 10:51:14.073963  121672 kubeadm.go:310] CGROUPS_BLKIO: enabled
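	(All of the cgroup controllers kubeadm checks for are enabled, although, per the SystemVerification warning further down, this node is still on cgroups v1. A one-line way to tell which cgroup version a node runs:
	
		$ stat -fc %T /sys/fs/cgroup
		tmpfs          # cgroups v1; cgroup2fs would mean v2
	)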
	I0317 10:51:14.138665  121672 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0317 10:51:14.138735  121672 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0317 10:51:14.138995  121672 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0317 10:51:14.139031  121672 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0317 10:51:14.139173  121672 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0317 10:51:14.139201  121672 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0317 10:51:14.145060  121672 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0317 10:51:14.145300  121672 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0317 10:51:14.151309  121672 out.go:235]   - Generating certificates and keys ...
	I0317 10:51:14.151511  121672 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0317 10:51:14.151549  121672 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0317 10:51:14.151644  121672 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0317 10:51:14.151669  121672 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0317 10:51:14.407135  121672 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0317 10:51:14.407165  121672 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0317 10:51:15.294120  121672 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0317 10:51:15.294149  121672 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0317 10:51:15.623450  121672 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0317 10:51:15.623485  121672 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0317 10:51:16.190853  121672 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0317 10:51:16.190895  121672 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0317 10:51:16.389859  121672 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0317 10:51:16.389890  121672 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0317 10:51:16.390278  121672 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-286863] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0317 10:51:16.390296  121672 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-286863] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0317 10:51:16.631615  121672 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0317 10:51:16.631641  121672 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0317 10:51:16.631769  121672 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-286863] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0317 10:51:16.631785  121672 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-286863] and IPs [192.168.67.2 127.0.0.1 ::1]
	I0317 10:51:16.835217  121672 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0317 10:51:16.835246  121672 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0317 10:51:17.066105  121672 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0317 10:51:17.066135  121672 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0317 10:51:17.277032  121672 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0317 10:51:17.277066  121672 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0317 10:51:17.277424  121672 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0317 10:51:17.277439  121672 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0317 10:51:17.548909  121672 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0317 10:51:17.548941  121672 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0317 10:51:18.160012  121672 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0317 10:51:18.160041  121672 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0317 10:51:18.535178  121672 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0317 10:51:18.535210  121672 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0317 10:51:18.745392  121672 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0317 10:51:18.745422  121672 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0317 10:51:19.527234  121672 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0317 10:51:19.527259  121672 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0317 10:51:19.529752  121672 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0317 10:51:19.529779  121672 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0317 10:51:19.532900  121672 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0317 10:51:19.532998  121672 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0317 10:51:19.536393  121672 out.go:235]   - Booting up control plane ...
	I0317 10:51:19.536494  121672 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0317 10:51:19.536503  121672 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0317 10:51:19.536574  121672 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0317 10:51:19.536579  121672 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0317 10:51:19.536642  121672 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0317 10:51:19.536646  121672 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0317 10:51:19.547199  121672 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0317 10:51:19.547224  121672 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0317 10:51:19.553868  121672 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0317 10:51:19.553894  121672 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0317 10:51:19.554156  121672 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0317 10:51:19.554169  121672 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0317 10:51:19.652345  121672 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0317 10:51:19.652370  121672 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0317 10:51:19.652477  121672 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0317 10:51:19.652484  121672 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0317 10:51:20.653243  121672 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.0009214s
	I0317 10:51:20.653272  121672 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.0009214s
	I0317 10:51:20.653344  121672 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0317 10:51:20.653357  121672 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0317 10:51:26.655342  121672 kubeadm.go:310] [api-check] The API server is healthy after 6.002336716s
	I0317 10:51:26.655369  121672 command_runner.go:130] > [api-check] The API server is healthy after 6.002336716s
	I0317 10:51:26.677394  121672 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0317 10:51:26.677420  121672 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0317 10:51:26.692476  121672 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0317 10:51:26.692505  121672 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0317 10:51:26.719108  121672 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0317 10:51:26.719137  121672 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0317 10:51:26.719571  121672 kubeadm.go:310] [mark-control-plane] Marking the node multinode-286863 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0317 10:51:26.719613  121672 command_runner.go:130] > [mark-control-plane] Marking the node multinode-286863 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0317 10:51:26.731216  121672 kubeadm.go:310] [bootstrap-token] Using token: yrrz6l.3kgphv1pnhpvx7iz
	I0317 10:51:26.731310  121672 command_runner.go:130] > [bootstrap-token] Using token: yrrz6l.3kgphv1pnhpvx7iz
	I0317 10:51:26.734185  121672 out.go:235]   - Configuring RBAC rules ...
	I0317 10:51:26.734309  121672 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0317 10:51:26.734319  121672 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0317 10:51:26.741486  121672 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0317 10:51:26.741509  121672 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0317 10:51:26.749860  121672 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0317 10:51:26.749881  121672 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0317 10:51:26.753735  121672 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0317 10:51:26.753748  121672 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0317 10:51:26.757706  121672 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0317 10:51:26.757731  121672 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0317 10:51:26.763760  121672 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0317 10:51:26.763783  121672 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0317 10:51:27.064596  121672 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0317 10:51:27.064619  121672 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0317 10:51:27.490832  121672 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0317 10:51:27.490859  121672 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0317 10:51:28.062951  121672 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0317 10:51:28.062977  121672 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0317 10:51:28.064374  121672 kubeadm.go:310] 
	I0317 10:51:28.064453  121672 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0317 10:51:28.064466  121672 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0317 10:51:28.064473  121672 kubeadm.go:310] 
	I0317 10:51:28.064556  121672 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0317 10:51:28.064565  121672 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0317 10:51:28.064570  121672 kubeadm.go:310] 
	I0317 10:51:28.064599  121672 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0317 10:51:28.064607  121672 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0317 10:51:28.064666  121672 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0317 10:51:28.064675  121672 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0317 10:51:28.064725  121672 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0317 10:51:28.064738  121672 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0317 10:51:28.064746  121672 kubeadm.go:310] 
	I0317 10:51:28.064800  121672 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0317 10:51:28.064808  121672 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0317 10:51:28.064812  121672 kubeadm.go:310] 
	I0317 10:51:28.064862  121672 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0317 10:51:28.064871  121672 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0317 10:51:28.064876  121672 kubeadm.go:310] 
	I0317 10:51:28.064933  121672 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0317 10:51:28.064942  121672 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0317 10:51:28.065016  121672 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0317 10:51:28.065025  121672 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0317 10:51:28.065092  121672 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0317 10:51:28.065102  121672 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0317 10:51:28.065106  121672 kubeadm.go:310] 
	I0317 10:51:28.065192  121672 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0317 10:51:28.065205  121672 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0317 10:51:28.065282  121672 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0317 10:51:28.065290  121672 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0317 10:51:28.065295  121672 kubeadm.go:310] 
	I0317 10:51:28.065378  121672 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token yrrz6l.3kgphv1pnhpvx7iz \
	I0317 10:51:28.065386  121672 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token yrrz6l.3kgphv1pnhpvx7iz \
	I0317 10:51:28.065488  121672 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 \
	I0317 10:51:28.065497  121672 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 \
	I0317 10:51:28.065518  121672 kubeadm.go:310] 	--control-plane 
	I0317 10:51:28.065526  121672 command_runner.go:130] > 	--control-plane 
	I0317 10:51:28.065531  121672 kubeadm.go:310] 
	I0317 10:51:28.065615  121672 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0317 10:51:28.065623  121672 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0317 10:51:28.065628  121672 kubeadm.go:310] 
	I0317 10:51:28.065726  121672 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token yrrz6l.3kgphv1pnhpvx7iz \
	I0317 10:51:28.065735  121672 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token yrrz6l.3kgphv1pnhpvx7iz \
	I0317 10:51:28.065836  121672 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 
	I0317 10:51:28.065845  121672 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 
	I0317 10:51:28.069095  121672 kubeadm.go:310] 	[WARNING SystemVerification]: cgroups v1 support is in maintenance mode, please migrate to cgroups v2
	I0317 10:51:28.069126  121672 command_runner.go:130] ! 	[WARNING SystemVerification]: cgroups v1 support is in maintenance mode, please migrate to cgroups v2
	I0317 10:51:28.069350  121672 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1077-aws\n", err: exit status 1
	I0317 10:51:28.069360  121672 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1077-aws\n", err: exit status 1
	I0317 10:51:28.069474  121672 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0317 10:51:28.069481  121672 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0317 10:51:28.069497  121672 cni.go:84] Creating CNI manager for ""
	I0317 10:51:28.069507  121672 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0317 10:51:28.074488  121672 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0317 10:51:28.077338  121672 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0317 10:51:28.081147  121672 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0317 10:51:28.081169  121672 command_runner.go:130] >   Size: 4030506   	Blocks: 7880       IO Block: 4096   regular file
	I0317 10:51:28.081176  121672 command_runner.go:130] > Device: 36h/54d	Inode: 1322586     Links: 1
	I0317 10:51:28.081183  121672 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0317 10:51:28.081190  121672 command_runner.go:130] > Access: 2023-12-04 16:39:54.000000000 +0000
	I0317 10:51:28.081195  121672 command_runner.go:130] > Modify: 2023-12-04 16:39:54.000000000 +0000
	I0317 10:51:28.081200  121672 command_runner.go:130] > Change: 2025-03-17 10:25:40.923347102 +0000
	I0317 10:51:28.081205  121672 command_runner.go:130] >  Birth: 2025-03-17 10:25:40.879346171 +0000
	I0317 10:51:28.081256  121672 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0317 10:51:28.081264  121672 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0317 10:51:28.103896  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0317 10:51:28.348625  121672 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0317 10:51:28.359880  121672 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0317 10:51:28.368276  121672 command_runner.go:130] > serviceaccount/kindnet created
	I0317 10:51:28.383530  121672 command_runner.go:130] > daemonset.apps/kindnet created
	I0317 10:51:28.390136  121672 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0317 10:51:28.390266  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:28.390358  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-286863 minikube.k8s.io/updated_at=2025_03_17T10_51_28_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76 minikube.k8s.io/name=multinode-286863 minikube.k8s.io/primary=true
	I0317 10:51:28.493359  121672 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0317 10:51:28.497401  121672 command_runner.go:130] > -16
	I0317 10:51:28.497474  121672 ops.go:34] apiserver oom_adj: -16
	I0317 10:51:28.497517  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:28.580213  121672 command_runner.go:130] > node/multinode-286863 labeled
	I0317 10:51:28.623905  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:28.998494  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:29.084520  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:29.498126  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:29.591470  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:29.997810  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:30.118548  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:30.498557  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:30.584992  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:30.997626  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:31.084465  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:31.497816  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:31.596090  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:31.998261  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:32.092514  121672 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0317 10:51:32.498176  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 10:51:32.689685  121672 command_runner.go:130] > NAME      SECRETS   AGE
	I0317 10:51:32.689707  121672 command_runner.go:130] > default   0         0s
	I0317 10:51:32.694237  121672 kubeadm.go:1113] duration metric: took 4.304003871s to wait for elevateKubeSystemPrivileges
	I0317 10:51:32.694269  121672 kubeadm.go:394] duration metric: took 18.845576392s to StartCluster
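
The burst of `kubectl get sa default` calls above, each answered with `Error from server (NotFound)`, is a poll loop: minikube retries roughly every 500ms until the `default` service account appears, then records the elapsed time as the elevateKubeSystemPrivileges metric. A minimal Go sketch of the same poll-until-exists pattern, assuming `kubectl` is on PATH (the kubeconfig path is the one from the log; the helper name is illustrative):

package main

import (
	"fmt"
	"os/exec"
	"time"
)

// waitForDefaultSA retries `kubectl get sa default` until the service
// account exists or the deadline passes, mirroring the ~500ms retry
// loop visible in the log above. The helper name is illustrative.
func waitForDefaultSA(kubeconfig string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		cmd := exec.Command("kubectl", "--kubeconfig", kubeconfig, "get", "sa", "default")
		if cmd.Run() == nil {
			return nil // NotFound stopped: the service account exists
		}
		time.Sleep(500 * time.Millisecond)
	}
	return fmt.Errorf("default service account not ready after %s", timeout)
}

func main() {
	if err := waitForDefaultSA("/var/lib/minikube/kubeconfig", time.Minute); err != nil {
		fmt.Println(err)
	}
}
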
	I0317 10:51:32.694287  121672 settings.go:142] acquiring lock: {Name:mk05e4f82496d9c1bce10f4ad315347825261fba Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:32.694346  121672 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:51:32.694987  121672 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/kubeconfig: {Name:mk08eb5822f827f6c2a387a47497144ae27dff3b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:51:32.695200  121672 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0317 10:51:32.695316  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0317 10:51:32.695549  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:51:32.695571  121672 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0317 10:51:32.695692  121672 addons.go:69] Setting storage-provisioner=true in profile "multinode-286863"
	I0317 10:51:32.695708  121672 addons.go:238] Setting addon storage-provisioner=true in "multinode-286863"
	I0317 10:51:32.695732  121672 host.go:66] Checking if "multinode-286863" exists ...
	I0317 10:51:32.696201  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:32.696393  121672 addons.go:69] Setting default-storageclass=true in profile "multinode-286863"
	I0317 10:51:32.696488  121672 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-286863"
	I0317 10:51:32.696802  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:32.699212  121672 out.go:177] * Verifying Kubernetes components...
	I0317 10:51:32.702149  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:51:32.749205  121672 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0317 10:51:32.749379  121672 loader.go:402] Config loaded from file:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:51:32.749562  121672 kapi.go:59] client config for multinode-286863: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt", KeyFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key", CAFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1e2c050), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0317 10:51:32.750066  121672 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0317 10:51:32.750078  121672 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0317 10:51:32.750083  121672 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0317 10:51:32.750088  121672 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0317 10:51:32.750354  121672 addons.go:238] Setting addon default-storageclass=true in "multinode-286863"
	I0317 10:51:32.750381  121672 host.go:66] Checking if "multinode-286863" exists ...
	I0317 10:51:32.750787  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:51:32.751027  121672 cert_rotation.go:140] Starting client certificate rotation controller
	I0317 10:51:32.753026  121672 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0317 10:51:32.753042  121672 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0317 10:51:32.753098  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:32.786954  121672 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0317 10:51:32.786976  121672 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0317 10:51:32.787045  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:51:32.795089  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:32.824682  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:51:33.020929  121672 command_runner.go:130] > apiVersion: v1
	I0317 10:51:33.020952  121672 command_runner.go:130] > data:
	I0317 10:51:33.020959  121672 command_runner.go:130] >   Corefile: |
	I0317 10:51:33.020963  121672 command_runner.go:130] >     .:53 {
	I0317 10:51:33.020967  121672 command_runner.go:130] >         errors
	I0317 10:51:33.020973  121672 command_runner.go:130] >         health {
	I0317 10:51:33.020978  121672 command_runner.go:130] >            lameduck 5s
	I0317 10:51:33.020981  121672 command_runner.go:130] >         }
	I0317 10:51:33.020985  121672 command_runner.go:130] >         ready
	I0317 10:51:33.020992  121672 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0317 10:51:33.021000  121672 command_runner.go:130] >            pods insecure
	I0317 10:51:33.021005  121672 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0317 10:51:33.021010  121672 command_runner.go:130] >            ttl 30
	I0317 10:51:33.021016  121672 command_runner.go:130] >         }
	I0317 10:51:33.021020  121672 command_runner.go:130] >         prometheus :9153
	I0317 10:51:33.021025  121672 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0317 10:51:33.021035  121672 command_runner.go:130] >            max_concurrent 1000
	I0317 10:51:33.021039  121672 command_runner.go:130] >         }
	I0317 10:51:33.021043  121672 command_runner.go:130] >         cache 30 {
	I0317 10:51:33.021053  121672 command_runner.go:130] >            disable success cluster.local
	I0317 10:51:33.021058  121672 command_runner.go:130] >            disable denial cluster.local
	I0317 10:51:33.021066  121672 command_runner.go:130] >         }
	I0317 10:51:33.021070  121672 command_runner.go:130] >         loop
	I0317 10:51:33.021079  121672 command_runner.go:130] >         reload
	I0317 10:51:33.021086  121672 command_runner.go:130] >         loadbalance
	I0317 10:51:33.021089  121672 command_runner.go:130] >     }
	I0317 10:51:33.021093  121672 command_runner.go:130] > kind: ConfigMap
	I0317 10:51:33.021097  121672 command_runner.go:130] > metadata:
	I0317 10:51:33.021104  121672 command_runner.go:130] >   creationTimestamp: "2025-03-17T10:51:27Z"
	I0317 10:51:33.021108  121672 command_runner.go:130] >   name: coredns
	I0317 10:51:33.021112  121672 command_runner.go:130] >   namespace: kube-system
	I0317 10:51:33.021117  121672 command_runner.go:130] >   resourceVersion: "263"
	I0317 10:51:33.021121  121672 command_runner.go:130] >   uid: 83b39ef6-4e60-47ef-969a-e9d0c80752b6
	I0317 10:51:33.025869  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.67.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0317 10:51:33.025971  121672 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0317 10:51:33.115291  121672 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0317 10:51:33.177457  121672 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0317 10:51:33.700017  121672 command_runner.go:130] > configmap/coredns replaced
	I0317 10:51:33.704554  121672 loader.go:402] Config loaded from file:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:51:33.704762  121672 kapi.go:59] client config for multinode-286863: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt", KeyFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key", CAFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1e2c050), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0317 10:51:33.705167  121672 node_ready.go:35] waiting up to 6m0s for node "multinode-286863" to be "Ready" ...
	I0317 10:51:33.705287  121672 type.go:168] "Request Body" body=""
	I0317 10:51:33.705349  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:33.705377  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.705400  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.705425  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.705644  121672 start.go:971] {"host.minikube.internal": 192.168.67.1} host record injected into CoreDNS's ConfigMap
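
The `sed` pipeline a few lines up splices a `hosts` stanza into the Corefile fetched from the `coredns` ConfigMap, so that `host.minikube.internal` resolves to the host gateway (192.168.67.1) inside the cluster. A rough Go equivalent of that string surgery, assuming the Corefile layout printed above (`injectHostRecord` is a hypothetical helper, not minikube's actual code path):

package main

import (
	"fmt"
	"strings"
)

// injectHostRecord inserts a hosts{} stanza ahead of the forward
// directive, which is what the sed pipeline in the log does to the
// Corefile before replacing the coredns ConfigMap.
func injectHostRecord(corefile, gatewayIP string) string {
	stanza := fmt.Sprintf("        hosts {\n           %s host.minikube.internal\n           fallthrough\n        }\n", gatewayIP)
	idx := strings.Index(corefile, "        forward")
	if idx < 0 {
		return corefile // layout differs from the one above; leave it unchanged
	}
	return corefile[:idx] + stanza + corefile[idx:]
}

func main() {
	corefile := ".:53 {\n        errors\n        forward . /etc/resolv.conf\n}\n"
	fmt.Print(injectHostRecord(corefile, "192.168.67.1"))
}
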
	I0317 10:51:33.706021  121672 loader.go:402] Config loaded from file:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:51:33.706192  121672 kapi.go:59] client config for multinode-286863: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt", KeyFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key", CAFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1e2c050), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0317 10:51:33.707891  121672 deployment.go:95] "Request Body" body=""
	I0317 10:51:33.707987  121672 round_trippers.go:470] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0317 10:51:33.708008  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.708033  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.708066  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.725484  121672 round_trippers.go:581] Response Status: 200 OK in 20 milliseconds
	I0317 10:51:33.725503  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.725511  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.725515  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.725518  121672 round_trippers.go:587]     Audit-Id: 521b10e6-3368-4cfb-bb51-a5bfe75f42c6
	I0317 10:51:33.725520  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.725522  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.725525  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.725681  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:33.725805  121672 node_ready.go:49] node "multinode-286863" has status "Ready":"True"
	I0317 10:51:33.725816  121672 node_ready.go:38] duration metric: took 20.593694ms for node "multinode-286863" to be "Ready" ...
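
Every readiness probe logged here is a plain HTTPS GET against the apiserver, authenticated with the profile's client certificate and sent with a protobuf-first `Accept` header, which is why the response bodies dump as hex. A self-contained sketch of one such request using only the Go standard library (endpoint and header values are taken from the log; the certificate file names are shortened placeholders for the profile paths shown earlier):

package main

import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// client.crt/client.key/ca.crt stand in for the profile paths in the log.
	cert, err := tls.LoadX509KeyPair("client.crt", "client.key")
	if err != nil {
		panic(err)
	}
	caPEM, err := os.ReadFile("ca.crt")
	if err != nil {
		panic(err)
	}
	pool := x509.NewCertPool()
	pool.AppendCertsFromPEM(caPEM)

	client := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{
			Certificates: []tls.Certificate{cert},
			RootCAs:      pool,
		},
	}}

	req, _ := http.NewRequest("GET", "https://192.168.67.2:8443/api/v1/nodes/multinode-286863", nil)
	// Prefer protobuf, fall back to JSON -- the same header pair the log shows.
	req.Header.Set("Accept", "application/vnd.kubernetes.protobuf,application/json")
	resp, err := client.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, len(body), "bytes")
}
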
	I0317 10:51:33.725824  121672 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0317 10:51:33.725868  121672 type.go:204] "Request Body" body=""
	I0317 10:51:33.725899  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:51:33.725903  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.725910  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.725914  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.725982  121672 round_trippers.go:581] Response Status: 200 OK in 17 milliseconds
	I0317 10:51:33.725989  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.725995  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.725999  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.726003  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.726007  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.726010  121672 round_trippers.go:587]     Content-Length: 144
	I0317 10:51:33.726013  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.726016  121672 round_trippers.go:587]     Audit-Id: 9a9b226f-ba65-44c3-b1fe-c66de53390d0
	I0317 10:51:33.726038  121672 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 63 66 36  |be-system".*$cf6|
		00000040  36 30 37 38 34 2d 62 35  38 33 2d 34 34 62 63 2d  |60784-b583-44bc-|
		00000050  38 30 63 63 2d 38 64 34  61 62 35 66 61 36 65 66  |80cc-8d4ab5fa6ef|
		00000060  32 32 03 33 37 34 38 00  42 08 08 af fd df be 06  |22.3748.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0317 10:51:33.726109  121672 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 63 66 36  |be-system".*$cf6|
		00000040  36 30 37 38 34 2d 62 35  38 33 2d 34 34 62 63 2d  |60784-b583-44bc-|
		00000050  38 30 63 63 2d 38 64 34  61 62 35 66 61 36 65 66  |80cc-8d4ab5fa6ef|
		00000060  32 32 03 33 37 34 38 00  42 08 08 af fd df be 06  |22.3748.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0317 10:51:33.726137  121672 round_trippers.go:470] PUT https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0317 10:51:33.726142  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.726148  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.726153  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.726158  121672 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.730944  121672 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0317 10:51:33.730976  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.730985  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.730989  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.730992  121672 round_trippers.go:587]     Audit-Id: 2a46e9b7-2296-431f-b69d-a130afe29be3
	I0317 10:51:33.730996  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.730998  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.731001  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.732506  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 84 d8 02 0a  09 0a 00 12 03 33 38 38  |ist..........388|
		00000020  1a 00 12 de 24 0a 94 17  0a 18 63 6f 72 65 64 6e  |....$.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 62 36 6d  |s-668d6bf9bc-b6m|
		00000040  68 39 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |h9..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  63 30 64 66 63 30 37 62  |stem".*$c0dfc07b|
		00000070  2d 63 30 36 64 2d 34 38  37 34 2d 38 62 61 34 2d  |-c06d-4874-8ba4-|
		00000080  65 38 30 33 33 62 30 39  37 38 38 66 32 03 33 37  |e8033b09788f2.37|
		00000090  35 38 00 42 08 08 b4 fd  df be 06 10 00 5a 13 0a  |58.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 216541 chars]
	 >
	I0317 10:51:33.732909  121672 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:33.732990  121672 type.go:168] "Request Body" body=""
	I0317 10:51:33.733062  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:33.733085  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.733110  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.733131  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.735984  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:33.736032  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.736052  121672 round_trippers.go:587]     Audit-Id: 3b885396-47b3-4adf-9759-6130c6b25712
	I0317 10:51:33.736073  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.736106  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.736125  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.736141  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.736160  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.736870  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:33.737063  121672 type.go:168] "Request Body" body=""
	I0317 10:51:33.737125  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:33.737143  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.737179  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.737206  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.737363  121672 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0317 10:51:33.737394  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.737414  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.737435  121672 round_trippers.go:587]     Audit-Id: 52fb135d-fd0e-4eb7-91be-1e6e573ac32e
	I0317 10:51:33.737470  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.737499  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.737516  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.737534  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.737553  121672 round_trippers.go:587]     Content-Length: 144
	I0317 10:51:33.737606  121672 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 63 66 36  |be-system".*$cf6|
		00000040  36 30 37 38 34 2d 62 35  38 33 2d 34 34 62 63 2d  |60784-b583-44bc-|
		00000050  38 30 63 63 2d 38 64 34  61 62 35 66 61 36 65 66  |80cc-8d4ab5fa6ef|
		00000060  32 32 03 33 38 39 38 00  42 08 08 af fd df be 06  |22.3898.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0317 10:51:33.744325  121672 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0317 10:51:33.744386  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.744408  121672 round_trippers.go:587]     Audit-Id: cbdb9101-1979-4742-9df1-e0d0bbe9645b
	I0317 10:51:33.744431  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.744464  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.744489  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.744509  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.744530  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.744785  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:33.908584  121672 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0317 10:51:33.914666  121672 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0317 10:51:33.923486  121672 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0317 10:51:33.934448  121672 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0317 10:51:33.942704  121672 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0317 10:51:33.957106  121672 command_runner.go:130] > pod/storage-provisioner created
	I0317 10:51:33.976009  121672 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0317 10:51:33.976114  121672 type.go:204] "Request Body" body=""
	I0317 10:51:33.976207  121672 round_trippers.go:470] GET https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses
	I0317 10:51:33.976217  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.976226  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.976232  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.978677  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:33.978719  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.978729  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.978733  121672 round_trippers.go:587]     Audit-Id: 42e647d8-75a8-41fd-9310-bc9762656f44
	I0317 10:51:33.978737  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.978740  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.978744  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.978747  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.978754  121672 round_trippers.go:587]     Content-Length: 957
	I0317 10:51:33.978851  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 34 30 37 1a  00 12 fd 06 0a cd 06 0a  |....407.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 35 37 64 63 31 65 36  32 2d 34 32 34 37 2d 34  |$57dc1e62-4247-4|
		00000060  35 35 34 2d 39 61 39 35  2d 33 61 35 62 38 64 63  |554-9a95-3a5b8dc|
		00000070  66 62 64 66 63 32 03 33  38 37 38 00 42 08 08 b5  |fbdfc2.3878.B...|
		00000080  fd df be 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0317 10:51:33.979060  121672 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 35  |tandard....".*$5|
		00000040  37 64 63 31 65 36 32 2d  34 32 34 37 2d 34 35 35  |7dc1e62-4247-455|
		00000050  34 2d 39 61 39 35 2d 33  61 35 62 38 64 63 66 62  |4-9a95-3a5b8dcfb|
		00000060  64 66 63 32 03 33 38 37  38 00 42 08 08 b5 fd df  |dfc2.3878.B.....|
		00000070  be 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0317 10:51:33.979114  121672 round_trippers.go:470] PUT https://192.168.67.2:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0317 10:51:33.979125  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:33.979132  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:33.979136  121672 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.979140  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:33.981755  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:33.981808  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:33.981844  121672 round_trippers.go:587]     Audit-Id: a5331ad0-e345-4e58-abe9-afc7d8ce8c2c
	I0317 10:51:33.981864  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:33.981894  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:33.981915  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:33.981934  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:33.981952  121672 round_trippers.go:587]     Content-Length: 939
	I0317 10:51:33.981987  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:33 GMT
	I0317 10:51:33.982289  121672 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 35  |tandard....".*$5|
		00000040  37 64 63 31 65 36 32 2d  34 32 34 37 2d 34 35 35  |7dc1e62-4247-455|
		00000050  34 2d 39 61 39 35 2d 33  61 35 62 38 64 63 66 62  |4-9a95-3a5b8dcfb|
		00000060  64 66 63 32 03 33 38 37  38 00 42 08 08 b5 fd df  |dfc2.3878.B.....|
		00000070  be 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0317 10:51:33.988083  121672 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0317 10:51:33.992237  121672 addons.go:514] duration metric: took 1.296661594s for enable addons: enabled=[storage-provisioner default-storageclass]
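
Both addons land the same way: the manifest bytes are scp'd into the node, then `kubectl apply` runs with `KUBECONFIG` pointed at the in-VM kubeconfig, exactly as the `Run:` lines above show. A small Go sketch of that second step (binary and manifest paths are the ones from the log; running this anywhere but inside the minikube node is illustrative only):

package main

import (
	"fmt"
	"os"
	"os/exec"
)

// applyAddon shells out the same way the log does: the versioned kubectl
// binary with KUBECONFIG pointing at the in-VM kubeconfig.
func applyAddon(manifest string) error {
	cmd := exec.Command("/var/lib/minikube/binaries/v1.32.2/kubectl", "apply", "-f", manifest)
	cmd.Env = append(os.Environ(), "KUBECONFIG=/var/lib/minikube/kubeconfig")
	out, err := cmd.CombinedOutput()
	fmt.Print(string(out))
	return err
}

func main() {
	for _, m := range []string{
		"/etc/kubernetes/addons/storage-provisioner.yaml",
		"/etc/kubernetes/addons/storageclass.yaml",
	} {
		if err := applyAddon(m); err != nil {
			fmt.Fprintln(os.Stderr, "apply failed:", err)
		}
	}
}
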
	I0317 10:51:34.206569  121672 deployment.go:95] "Request Body" body=""
	I0317 10:51:34.206669  121672 round_trippers.go:470] GET https://192.168.67.2:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0317 10:51:34.206696  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:34.206708  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:34.206713  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:34.209468  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:34.209498  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:34.209507  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:34.209512  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:34.209515  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:34.209520  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:34.209523  121672 round_trippers.go:587]     Content-Length: 144
	I0317 10:51:34.209526  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:34 GMT
	I0317 10:51:34.209529  121672 round_trippers.go:587]     Audit-Id: 94a85607-47c4-4578-b213-3f512b5b9513
	I0317 10:51:34.209575  121672 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 63 66 36  |be-system".*$cf6|
		00000040  36 30 37 38 34 2d 62 35  38 33 2d 34 34 62 63 2d  |60784-b583-44bc-|
		00000050  38 30 63 63 2d 38 64 34  61 62 35 66 61 36 65 66  |80cc-8d4ab5fa6ef|
		00000060  32 32 03 33 39 38 38 00  42 08 08 af fd df be 06  |22.3988.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0317 10:51:34.209635  121672 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-286863" context rescaled to 1 replicas
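
The GET followed by PUT on `.../deployments/coredns/scale` above is minikube trimming CoreDNS from the default two replicas down to one on a single-node control plane. A hedged client-go sketch of the same scale-subresource round trip (import paths follow the usual client-go layout; error handling is kept minimal):

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/minikube/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	ctx := context.Background()
	// Read the current Scale, then write it back with one replica --
	// the same GET-then-PUT on the scale subresource seen in the log.
	scale, err := cs.AppsV1().Deployments("kube-system").GetScale(ctx, "coredns", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	scale.Spec.Replicas = 1
	if _, err := cs.AppsV1().Deployments("kube-system").UpdateScale(ctx, "coredns", scale, metav1.UpdateOptions{}); err != nil {
		panic(err)
	}
	fmt.Println("coredns rescaled to 1 replica")
}
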
	I0317 10:51:34.233777  121672 type.go:168] "Request Body" body=""
	I0317 10:51:34.233920  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:34.233937  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:34.233968  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:34.233982  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:34.236335  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:34.236366  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:34.236375  121672 round_trippers.go:587]     Audit-Id: be9b455e-41b0-4af1-bb67-69d8d8512d49
	I0317 10:51:34.236380  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:34.236384  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:34.236388  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:34.236393  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:34.236395  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:34 GMT
	I0317 10:51:34.236633  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:34.236811  121672 type.go:168] "Request Body" body=""
	I0317 10:51:34.236868  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:34.236880  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:34.236887  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:34.236892  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:34.239623  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:34.239682  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:34.239704  121672 round_trippers.go:587]     Audit-Id: 9a3d44a3-6312-4323-9248-133c0efab61a
	I0317 10:51:34.239725  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:34.239760  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:34.239784  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:34.239803  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:34.239824  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:34 GMT
	I0317 10:51:34.240132  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:34.733499  121672 type.go:168] "Request Body" body=""
	I0317 10:51:34.733642  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:34.733682  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:34.733705  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:34.733730  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:34.736089  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:34.736149  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:34.736171  121672 round_trippers.go:587]     Audit-Id: a5cb9ff4-0c77-4150-8bc0-5684d8bec841
	I0317 10:51:34.736194  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:34.736227  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:34.736252  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:34.736271  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:34.736295  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:34 GMT
	I0317 10:51:34.736553  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:34.736742  121672 type.go:168] "Request Body" body=""
	I0317 10:51:34.736803  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:34.736822  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:34.736857  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:34.736885  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:34.738796  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:34.738849  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:34.738901  121672 round_trippers.go:587]     Audit-Id: 0980c2e1-349e-4a18-bf26-956d7ea61c58
	I0317 10:51:34.738927  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:34.738950  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:34.738970  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:34.739003  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:34.739023  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:34 GMT
	I0317 10:51:34.739418  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:35.233105  121672 type.go:168] "Request Body" body=""
	I0317 10:51:35.233235  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:35.233268  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:35.233294  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:35.233317  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:35.235898  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:35.235963  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:35.235985  121672 round_trippers.go:587]     Audit-Id: 59ef2d2d-8c79-4eb3-8fe1-52b65b6ceddd
	I0317 10:51:35.236006  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:35.236040  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:35.236059  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:35.236081  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:35.236100  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:35 GMT
	I0317 10:51:35.236788  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:35.236972  121672 type.go:168] "Request Body" body=""
	I0317 10:51:35.237018  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:35.237026  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:35.237034  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:35.237038  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:35.239078  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:35.239149  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:35.239164  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:35.239170  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:35.239174  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:35 GMT
	I0317 10:51:35.239179  121672 round_trippers.go:587]     Audit-Id: e8967d83-c83f-4eab-ba48-4ec243ca0600
	I0317 10:51:35.239182  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:35.239186  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:35.239663  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:35.733138  121672 type.go:168] "Request Body" body=""
	I0317 10:51:35.733214  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:35.733224  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:35.733236  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:35.733247  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:35.736319  121672 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0317 10:51:35.736395  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:35.736419  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:35 GMT
	I0317 10:51:35.736440  121672 round_trippers.go:587]     Audit-Id: 5742f4e8-643e-4c7d-9517-32a904271d94
	I0317 10:51:35.736476  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:35.736499  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:35.736520  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:35.736556  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:35.737273  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:35.737507  121672 type.go:168] "Request Body" body=""
	I0317 10:51:35.737586  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:35.737611  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:35.737649  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:35.737680  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:35.740559  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:35.740614  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:35.740635  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:35.740665  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:35 GMT
	I0317 10:51:35.740670  121672 round_trippers.go:587]     Audit-Id: e2cce842-6b2a-476e-b52f-0371ea10038e
	I0317 10:51:35.740674  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:35.740678  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:35.740681  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:35.740929  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:35.741056  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
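	The cycle above repeats roughly every 500ms: GET the coredns pod, GET the node it runs on, then log the pod's Ready condition. A minimal client-go sketch of this readiness poll follows; it is not minikube's actual pod_ready implementation, and the kubeconfig path, timeout, and hard-coded object names are illustrative assumptions taken from the log.

	package main

	import (
		"context"
		"fmt"
		"time"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	// podReady reports whether the pod's PodReady condition is True.
	func podReady(pod *corev1.Pod) bool {
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady {
				return c.Status == corev1.ConditionTrue
			}
		}
		return false
	}

	func main() {
		// Assumed kubeconfig path -- substitute your own profile's config.
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config")
		if err != nil {
			panic(err)
		}
		client, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}

		ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
		defer cancel()

		for {
			// GET the pod, as in the first request of each logged cycle.
			pod, err := client.CoreV1().Pods("kube-system").Get(ctx, "coredns-668d6bf9bc-b6mh9", metav1.GetOptions{})
			if err != nil {
				panic(err)
			}
			// GET the node it is scheduled on, as in the second request.
			if _, err := client.CoreV1().Nodes().Get(ctx, pod.Spec.NodeName, metav1.GetOptions{}); err != nil {
				panic(err)
			}
			if podReady(pod) {
				fmt.Println("pod is Ready")
				return
			}
			fmt.Printf("pod %q in %q namespace has status \"Ready\":\"False\"\n", pod.Name, pod.Namespace)

			select {
			case <-ctx.Done():
				fmt.Println("timed out waiting for readiness")
				return
			case <-time.After(500 * time.Millisecond):
			}
		}
	}

	The loop exits as soon as the PodReady condition reports True, mirroring the pod_ready.go verdicts interleaved through the log.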
	I0317 10:51:36.233829  121672 type.go:168] "Request Body" body=""
	I0317 10:51:36.233940  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:36.233954  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:36.233975  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:36.233987  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:36.236210  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:36.236234  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:36.236244  121672 round_trippers.go:587]     Audit-Id: bbab8a7d-4023-43fa-a2ff-43b0d34028a5
	I0317 10:51:36.236250  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:36.236253  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:36.236257  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:36.236262  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:36.236266  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:36 GMT
	I0317 10:51:36.236644  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:36.236786  121672 type.go:168] "Request Body" body=""
	I0317 10:51:36.236829  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:36.236839  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:36.236847  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:36.236852  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:36.238580  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:36.238633  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:36.238654  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:36.238674  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:36.238683  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:36.238714  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:36.238733  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:36 GMT
	I0317 10:51:36.238738  121672 round_trippers.go:587]     Audit-Id: bc7f86cd-48c3-4d73-8f8e-ac35bd3e7a01
	I0317 10:51:36.238961  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:36.733876  121672 type.go:168] "Request Body" body=""
	I0317 10:51:36.733957  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:36.733967  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:36.733976  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:36.733985  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:36.736541  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:36.736607  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:36.736629  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:36.736652  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:36 GMT
	I0317 10:51:36.736690  121672 round_trippers.go:587]     Audit-Id: a680c4ae-b942-447c-892a-daab1f16414c
	I0317 10:51:36.736715  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:36.736735  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:36.736769  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:36.737029  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:36.737193  121672 type.go:168] "Request Body" body=""
	I0317 10:51:36.737236  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:36.737246  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:36.737253  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:36.737259  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:36.739265  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:36.739325  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:36.739348  121672 round_trippers.go:587]     Audit-Id: d3d16dae-0d5c-400e-9347-32b2a31f7158
	I0317 10:51:36.739370  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:36.739401  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:36.739412  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:36.739416  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:36.739420  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:36 GMT
	I0317 10:51:36.739641  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:37.233197  121672 type.go:168] "Request Body" body=""
	I0317 10:51:37.233282  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:37.233301  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:37.233312  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:37.233318  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:37.235953  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:37.235974  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:37.235984  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:37 GMT
	I0317 10:51:37.235989  121672 round_trippers.go:587]     Audit-Id: 1007f69e-b210-4d0a-a18e-5a8bec87957e
	I0317 10:51:37.235992  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:37.235996  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:37.235998  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:37.236001  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:37.236216  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:37.236347  121672 type.go:168] "Request Body" body=""
	I0317 10:51:37.236386  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:37.236390  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:37.236397  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:37.236401  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:37.238565  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:37.238593  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:37.238602  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:37.238606  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:37.238609  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:37.238611  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:37 GMT
	I0317 10:51:37.238614  121672 round_trippers.go:587]     Audit-Id: 1144a8b3-0d8d-41ce-aaf5-f5334aa25e66
	I0317 10:51:37.238616  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:37.238953  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:37.733733  121672 type.go:168] "Request Body" body=""
	I0317 10:51:37.733804  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:37.733814  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:37.733822  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:37.733831  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:37.736091  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:37.736152  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:37.736174  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:37.736195  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:37 GMT
	I0317 10:51:37.736225  121672 round_trippers.go:587]     Audit-Id: 255efb99-1933-4722-81bf-78dd760887b9
	I0317 10:51:37.736253  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:37.736271  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:37.736291  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:37.736529  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:37.736674  121672 type.go:168] "Request Body" body=""
	I0317 10:51:37.736716  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:37.736726  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:37.736734  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:37.736741  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:37.738594  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:37.738614  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:37.738622  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:37.738627  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:37.738631  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:37 GMT
	I0317 10:51:37.738637  121672 round_trippers.go:587]     Audit-Id: dc8d9028-431e-448b-b0be-66bbe127ef25
	I0317 10:51:37.738642  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:37.738649  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:37.739068  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 33 33  32 38 00 42 08 08 ac fd  |5f582.3328.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:38.233886  121672 type.go:168] "Request Body" body=""
	I0317 10:51:38.233973  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:38.233984  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:38.233993  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:38.233998  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:38.236343  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:38.236369  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:38.236379  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:38.236384  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:38.236388  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:38.236393  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:38 GMT
	I0317 10:51:38.236397  121672 round_trippers.go:587]     Audit-Id: 212402d0-9ce2-45cc-adf5-531a205bdeec
	I0317 10:51:38.236400  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:38.236775  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:38.236994  121672 type.go:168] "Request Body" body=""
	I0317 10:51:38.237039  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:38.237048  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:38.237056  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:38.237064  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:38.238973  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:38.238998  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:38.239006  121672 round_trippers.go:587]     Audit-Id: 3b4a89d6-d9f9-4649-9869-31c28fbccd08
	I0317 10:51:38.239011  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:38.239014  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:38.239019  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:38.239024  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:38.239027  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:38 GMT
	I0317 10:51:38.239331  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:38.239438  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
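	Every request above advertises Accept: application/vnd.kubernetes.protobuf,application/json, which is why each hex-dumped body begins with the 4-byte Kubernetes protobuf envelope magic ("k8s" followed by a zero byte, i.e. 6b 38 73 00 at offset 00000000). A short sketch of opting into protobuf with client-go; AcceptContentTypes and ContentType are real rest.Config fields, while the kubeconfig path is an assumption.

	package main

	import (
		"fmt"

		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		// Assumed kubeconfig location; minikube manages its own profile kubeconfig.
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config")
		if err != nil {
			panic(err)
		}
		// Prefer protobuf on the wire, falling back to JSON -- this produces the
		// Accept header and the "k8s\x00"-prefixed bodies seen in the log above.
		cfg.AcceptContentTypes = "application/vnd.kubernetes.protobuf,application/json"
		cfg.ContentType = "application/vnd.kubernetes.protobuf"

		client, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}
		fmt.Printf("client ready: %T\n", client)
	}

	Protobuf halves neither latency nor size here by itself, but it is the default transport minikube's library code negotiates, hence the binary dumps rather than JSON in this trace.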
	I0317 10:51:38.733143  121672 type.go:168] "Request Body" body=""
	I0317 10:51:38.733231  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:38.733241  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:38.733249  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:38.733257  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:38.735499  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:38.735534  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:38.735544  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:38 GMT
	I0317 10:51:38.735550  121672 round_trippers.go:587]     Audit-Id: dca8edcb-ff14-4294-a790-1df55ccd47a1
	I0317 10:51:38.735554  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:38.735559  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:38.735562  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:38.735564  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:38.735961  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:38.736104  121672 type.go:168] "Request Body" body=""
	I0317 10:51:38.736156  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:38.736166  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:38.736173  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:38.736178  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:38.738132  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:38.738169  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:38.738189  121672 round_trippers.go:587]     Audit-Id: 9e314a04-8f73-4fc2-a7f1-668f885389a2
	I0317 10:51:38.738199  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:38.738203  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:38.738206  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:38.738209  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:38.738212  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:38 GMT
	I0317 10:51:38.738480  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:39.233265  121672 type.go:168] "Request Body" body=""
	I0317 10:51:39.233346  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:39.233357  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:39.233366  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:39.233370  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:39.235689  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:39.235730  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:39.235739  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:39 GMT
	I0317 10:51:39.235743  121672 round_trippers.go:587]     Audit-Id: bae2d578-81d9-442e-8403-c6dfe38fb14c
	I0317 10:51:39.235747  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:39.235753  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:39.235756  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:39.235760  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:39.236153  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:39.236304  121672 type.go:168] "Request Body" body=""
	I0317 10:51:39.236348  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:39.236359  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:39.236366  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:39.236373  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:39.238221  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:39.238235  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:39.238242  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:39.238245  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:39.238249  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:39 GMT
	I0317 10:51:39.238252  121672 round_trippers.go:587]     Audit-Id: b0469882-8f9c-4007-ad41-9b13ed1b1bc5
	I0317 10:51:39.238254  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:39.238258  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:39.238488  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:39.733127  121672 type.go:168] "Request Body" body=""
	I0317 10:51:39.733218  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:39.733229  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:39.733238  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:39.733246  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:39.735518  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:39.735542  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:39.735548  121672 round_trippers.go:587]     Audit-Id: c311a89e-df94-4e6e-a915-a179f0f56aa3
	I0317 10:51:39.735553  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:39.735556  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:39.735560  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:39.735563  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:39.735565  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:39 GMT
	I0317 10:51:39.735911  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:39.736063  121672 type.go:168] "Request Body" body=""
	I0317 10:51:39.736104  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:39.736114  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:39.736121  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:39.736125  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:39.737961  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:39.738061  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:39.738080  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:39.738089  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:39.738094  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:39 GMT
	I0317 10:51:39.738098  121672 round_trippers.go:587]     Audit-Id: 9e6a587e-0fe1-4876-b75e-9f6704991f3b
	I0317 10:51:39.738101  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:39.738105  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:39.738282  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:40.234138  121672 type.go:168] "Request Body" body=""
	I0317 10:51:40.234232  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:40.234244  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:40.234253  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:40.234258  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:40.236602  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:40.236628  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:40.236636  121672 round_trippers.go:587]     Audit-Id: a6fd153a-c94f-4092-8bf4-00d1245cc535
	I0317 10:51:40.236642  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:40.236646  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:40.236649  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:40.236652  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:40.236654  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:40 GMT
	I0317 10:51:40.236990  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:40.237137  121672 type.go:168] "Request Body" body=""
	I0317 10:51:40.237181  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:40.237192  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:40.237200  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:40.237209  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:40.239153  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:40.239170  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:40.239178  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:40 GMT
	I0317 10:51:40.239183  121672 round_trippers.go:587]     Audit-Id: c4e550dd-f52d-417a-b137-ad895dd0f7c9
	I0317 10:51:40.239201  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:40.239204  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:40.239207  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:40.239210  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:40.239420  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:40.239519  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
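	Polling every 500ms is what generates the repetitive GET traffic above (note the Node body's resourceVersion advancing from 3328 to 4278 between cycles, so the object is changing while the pod stays not-Ready). A watch would push those updates instead of re-fetching; the sketch below shows that alternative plainly as a different technique, not what minikube's pod_ready loop does, with the kubeconfig path and object names again assumed from the log.

	package main

	import (
		"context"
		"fmt"

		corev1 "k8s.io/api/core/v1"
		metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
		"k8s.io/client-go/kubernetes"
		"k8s.io/client-go/tools/clientcmd"
	)

	func main() {
		cfg, err := clientcmd.BuildConfigFromFlags("", "/home/user/.kube/config") // assumed path
		if err != nil {
			panic(err)
		}
		client, err := kubernetes.NewForConfig(cfg)
		if err != nil {
			panic(err)
		}

		// Watch a single pod by name instead of polling it in a loop.
		w, err := client.CoreV1().Pods("kube-system").Watch(context.Background(), metav1.ListOptions{
			FieldSelector: "metadata.name=coredns-668d6bf9bc-b6mh9",
		})
		if err != nil {
			panic(err)
		}
		defer w.Stop()

		// Block until an event shows the PodReady condition turning True.
		for event := range w.ResultChan() {
			pod, ok := event.Object.(*corev1.Pod)
			if !ok {
				continue
			}
			for _, c := range pod.Status.Conditions {
				if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
					fmt.Println("pod is Ready")
					return
				}
			}
		}
	}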
	I0317 10:51:40.733127  121672 type.go:168] "Request Body" body=""
	I0317 10:51:40.733202  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:40.733215  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:40.733224  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:40.733233  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:40.735390  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:40.735419  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:40.735428  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:40.735433  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:40 GMT
	I0317 10:51:40.735438  121672 round_trippers.go:587]     Audit-Id: 36d466c3-41da-487e-b8b7-0efe8d73d3df
	I0317 10:51:40.735456  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:40.735467  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:40.735471  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:40.735976  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:40.736119  121672 type.go:168] "Request Body" body=""
	I0317 10:51:40.736163  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:40.736173  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:40.736181  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:40.736187  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:40.738023  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:40.738038  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:40.738046  121672 round_trippers.go:587]     Audit-Id: 3577eb04-e540-4f76-bd68-f5ed3ae08838
	I0317 10:51:40.738049  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:40.738053  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:40.738057  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:40.738061  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:40.738065  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:40 GMT
	I0317 10:51:40.738261  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
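The GET / "Request Headers:" / "Response Status:" lines themselves come from a debugging round tripper that client-go wraps around the HTTP transport at high log verbosity. A rough sketch of the pattern, assuming nothing beyond the standard library (the type name loggingRoundTripper and the /version path are illustrative, and a real request against the cluster would additionally need its TLS credentials):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// loggingRoundTripper prints the verb, URL, request headers, and the
// response status with latency, approximating the trace lines in this log.
type loggingRoundTripper struct {
	next http.RoundTripper
}

func (l loggingRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	fmt.Printf("%s %s\n", req.Method, req.URL)
	fmt.Println("Request Headers:")
	for k, vs := range req.Header {
		for _, v := range vs {
			fmt.Printf("    %s: %s\n", k, v)
		}
	}
	start := time.Now()
	resp, err := l.next.RoundTrip(req)
	if err != nil {
		return nil, err
	}
	fmt.Printf("Response Status: %s in %d milliseconds\n",
		resp.Status, time.Since(start).Milliseconds())
	return resp, nil
}

func main() {
	client := &http.Client{Transport: loggingRoundTripper{next: http.DefaultTransport}}
	resp, err := client.Get("https://192.168.67.2:8443/version")
	if err != nil {
		fmt.Println("request failed:", err) // expected without cluster credentials
		return
	}
	resp.Body.Close()
}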
	I0317 10:51:41.234039  121672 type.go:168] "Request Body" body=""
	I0317 10:51:41.234108  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:41.234115  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:41.234126  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:41.234130  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:41.236478  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:41.236505  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:41.236514  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:41.236518  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:41.236522  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:41.236526  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:41 GMT
	I0317 10:51:41.236530  121672 round_trippers.go:587]     Audit-Id: d3a0b7d2-14fb-4853-b3f9-783452dea426
	I0317 10:51:41.236533  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:41.236836  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:41.236993  121672 type.go:168] "Request Body" body=""
	I0317 10:51:41.237037  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:41.237048  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:41.237056  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:41.237064  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:41.238978  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:41.239001  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:41.239009  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:41.239013  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:41.239017  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:41.239020  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:41 GMT
	I0317 10:51:41.239024  121672 round_trippers.go:587]     Audit-Id: 54628c4f-e3ea-4c65-9943-7e9e4b23db07
	I0317 10:51:41.239028  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:41.239227  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:41.734044  121672 type.go:168] "Request Body" body=""
	I0317 10:51:41.734117  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:41.734128  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:41.734137  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:41.734146  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:41.736393  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:41.736413  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:41.736421  121672 round_trippers.go:587]     Audit-Id: 0ac82398-dcd4-429d-8c6c-9be416e2b47e
	I0317 10:51:41.736425  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:41.736428  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:41.736430  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:41.736433  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:41.736435  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:41 GMT
	I0317 10:51:41.736757  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:41.736901  121672 type.go:168] "Request Body" body=""
	I0317 10:51:41.736939  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:41.736945  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:41.736952  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:41.736957  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:41.738777  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:41.738794  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:41.738801  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:41.738808  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:41.738812  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:41.738816  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:41 GMT
	I0317 10:51:41.738820  121672 round_trippers.go:587]     Audit-Id: 71afe804-9e9f-4d1b-87c5-b99be3d3202a
	I0317 10:51:41.738823  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:41.739118  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:42.234041  121672 type.go:168] "Request Body" body=""
	I0317 10:51:42.234126  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:42.234134  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:42.234147  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:42.234154  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:42.237044  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:42.237073  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:42.237090  121672 round_trippers.go:587]     Audit-Id: 71cebd00-a418-460f-a60a-adeb497ca945
	I0317 10:51:42.237096  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:42.237101  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:42.237105  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:42.237108  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:42.237111  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:42 GMT
	I0317 10:51:42.237592  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:42.237757  121672 type.go:168] "Request Body" body=""
	I0317 10:51:42.237806  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:42.237816  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:42.237825  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:42.237832  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:42.240106  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:42.240133  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:42.240142  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:42.240147  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:42.240150  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:42 GMT
	I0317 10:51:42.240153  121672 round_trippers.go:587]     Audit-Id: 4b774d79-6c53-49cc-bbdc-fea9ec0c0e0b
	I0317 10:51:42.240156  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:42.240159  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:42.240376  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:42.240497  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
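The pod_ready line above marks the outer loop driving all of these requests: minikube re-fetches the coredns Pod (and its Node) roughly every 500ms and keeps waiting while the PodReady condition is False. A minimal client-go sketch of that wait, assuming a reachable kubeconfig (the helper name waitPodReady is illustrative, not minikube's own):

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// waitPodReady re-fetches the named Pod on a fixed interval until its
// PodReady condition is True or ctx expires, mirroring the poll in this log.
func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
	ticker := time.NewTicker(500 * time.Millisecond)
	defer ticker.Stop()
	for {
		pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
		if err != nil {
			return err
		}
		for _, c := range pod.Status.Conditions {
			if c.Type == corev1.PodReady && c.Status == corev1.ConditionTrue {
				return nil // pod became Ready
			}
		}
		fmt.Printf("pod %q in %q namespace has status \"Ready\":\"False\"\n", name, ns)
		select {
		case <-ctx.Done():
			return ctx.Err() // deadline hit while the pod was still not Ready
		case <-ticker.C:
		}
	}
}

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 4*time.Minute)
	defer cancel()
	if err := waitPodReady(ctx, cs, "kube-system", "coredns-668d6bf9bc-b6mh9"); err != nil {
		fmt.Println("pod never became Ready:", err)
	}
}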
	I0317 10:51:42.733126  121672 type.go:168] "Request Body" body=""
	I0317 10:51:42.733196  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:42.733206  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:42.733215  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:42.733222  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:42.735558  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:42.735580  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:42.735588  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:42 GMT
	I0317 10:51:42.735592  121672 round_trippers.go:587]     Audit-Id: ca70c6bc-a327-4e5b-979e-bcfc385b9299
	I0317 10:51:42.735595  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:42.735597  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:42.735600  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:42.735602  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:42.736111  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:42.736280  121672 type.go:168] "Request Body" body=""
	I0317 10:51:42.736331  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:42.736341  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:42.736349  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:42.736355  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:42.738283  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:42.738303  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:42.738314  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:42.738318  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:42.738322  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:42.738325  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:42.738329  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:42 GMT
	I0317 10:51:42.738332  121672 round_trippers.go:587]     Audit-Id: cb358703-f0ee-4655-a603-307c486089e0
	I0317 10:51:42.738543  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:43.233510  121672 type.go:168] "Request Body" body=""
	I0317 10:51:43.233582  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:43.233594  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:43.233620  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:43.233630  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:43.235878  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:43.235902  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:43.235914  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:43 GMT
	I0317 10:51:43.235920  121672 round_trippers.go:587]     Audit-Id: f818a32a-f7e3-4f00-876d-83b11d9a5cc2
	I0317 10:51:43.235924  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:43.235927  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:43.235930  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:43.235932  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:43.236514  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:43.236662  121672 type.go:168] "Request Body" body=""
	I0317 10:51:43.236710  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:43.236721  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:43.236728  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:43.236739  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:43.238825  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:43.238842  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:43.238850  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:43.238854  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:43.238858  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:43 GMT
	I0317 10:51:43.238861  121672 round_trippers.go:587]     Audit-Id: 63b87a99-d807-4491-b753-2669291d555f
	I0317 10:51:43.238886  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:43.238890  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:43.239081  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:43.733741  121672 type.go:168] "Request Body" body=""
	I0317 10:51:43.733810  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:43.733820  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:43.733829  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:43.733838  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:43.736197  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:43.736222  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:43.736231  121672 round_trippers.go:587]     Audit-Id: 675e59e0-33f0-42e1-af82-06581bf04871
	I0317 10:51:43.736237  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:43.736241  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:43.736244  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:43.736248  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:43.736251  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:43 GMT
	I0317 10:51:43.736708  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:43.736863  121672 type.go:168] "Request Body" body=""
	I0317 10:51:43.736907  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:43.736917  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:43.736926  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:43.736930  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:43.738953  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:43.738975  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:43.738984  121672 round_trippers.go:587]     Audit-Id: 50179eaa-4e5d-4cbc-8e3f-8c9a48d49171
	I0317 10:51:43.738988  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:43.738991  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:43.738994  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:43.738998  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:43.739001  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:43 GMT
	I0317 10:51:43.739423  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:44.233121  121672 type.go:168] "Request Body" body=""
	I0317 10:51:44.233248  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:44.233261  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:44.233269  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:44.233275  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:44.235529  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:44.235550  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:44.235559  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:44.235563  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:44 GMT
	I0317 10:51:44.235566  121672 round_trippers.go:587]     Audit-Id: 5ba88591-e2d5-41b5-86d0-00d99bd2e052
	I0317 10:51:44.235569  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:44.235572  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:44.235575  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:44.235875  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:44.236045  121672 type.go:168] "Request Body" body=""
	I0317 10:51:44.236089  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:44.236099  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:44.236107  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:44.236113  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:44.237927  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:44.237949  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:44.237956  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:44.237960  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:44.237963  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:44.237966  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:44.237969  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:44 GMT
	I0317 10:51:44.237972  121672 round_trippers.go:587]     Audit-Id: 7d92dcd5-af77-4e40-8e63-629bd883cc5a
	I0317 10:51:44.238250  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:44.734089  121672 type.go:168] "Request Body" body=""
	I0317 10:51:44.734195  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:44.734215  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:44.734224  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:44.734232  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:44.736707  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:44.736726  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:44.736734  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:44.736740  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:44.736744  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:44.736747  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:44 GMT
	I0317 10:51:44.736750  121672 round_trippers.go:587]     Audit-Id: 28e9af1f-490a-41da-ae61-4f5406c0136a
	I0317 10:51:44.736753  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:44.736973  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:44.737113  121672 type.go:168] "Request Body" body=""
	I0317 10:51:44.737148  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:44.737153  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:44.737161  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:44.737167  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:44.739174  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:44.739196  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:44.739204  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:44.739209  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:44.739212  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:44.739215  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:44.739218  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:44 GMT
	I0317 10:51:44.739221  121672 round_trippers.go:587]     Audit-Id: ab9375f0-2cb3-4e47-a6ac-81aa28bca6be
	I0317 10:51:44.739706  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:44.739824  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
	I0317 10:51:45.233511  121672 type.go:168] "Request Body" body=""
	I0317 10:51:45.233621  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:45.233635  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:45.233645  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:45.233651  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:45.236525  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:45.236560  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:45.236570  121672 round_trippers.go:587]     Audit-Id: 9eaa40f1-813b-44dd-9c9c-21fea62cd788
	I0317 10:51:45.236575  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:45.236578  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:45.236583  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:45.236586  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:45.236589  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:45 GMT
	I0317 10:51:45.237040  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:45.237217  121672 type.go:168] "Request Body" body=""
	I0317 10:51:45.237277  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:45.237283  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:45.237292  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:45.237298  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:45.240137  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:45.240169  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:45.240179  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:45.240185  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:45.240189  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:45.240192  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:45 GMT
	I0317 10:51:45.240196  121672 round_trippers.go:587]     Audit-Id: 9c2b4f78-a783-4ed5-8327-d26ade552238
	I0317 10:51:45.240200  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:45.240392  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:45.733127  121672 type.go:168] "Request Body" body=""
	I0317 10:51:45.733233  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:45.733247  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:45.733256  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:45.733260  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:45.735766  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:45.735796  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:45.735806  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:45.735811  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:45.735814  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:45.735818  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:45 GMT
	I0317 10:51:45.735821  121672 round_trippers.go:587]     Audit-Id: 510a3330-6f94-4bdc-8254-26274278e196
	I0317 10:51:45.735823  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:45.736280  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:45.736449  121672 type.go:168] "Request Body" body=""
	I0317 10:51:45.736493  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:45.736517  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:45.736530  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:45.736536  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:45.738423  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:45.738445  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:45.738453  121672 round_trippers.go:587]     Audit-Id: e88c8e05-cadd-464b-b99c-b6570d6a9437
	I0317 10:51:45.738458  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:45.738461  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:45.738464  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:45.738467  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:45.738471  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:45 GMT
	I0317 10:51:45.738703  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:46.233167  121672 type.go:168] "Request Body" body=""
	I0317 10:51:46.233239  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:46.233246  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:46.233259  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:46.233275  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:46.235520  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:46.235540  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:46.235548  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:46.235552  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:46.235555  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:46.235558  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:46 GMT
	I0317 10:51:46.235560  121672 round_trippers.go:587]     Audit-Id: f06d353d-df0d-4b8d-a8d2-ad394b49259a
	I0317 10:51:46.235563  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:46.235879  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:46.236026  121672 type.go:168] "Request Body" body=""
	I0317 10:51:46.236074  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:46.236086  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:46.236094  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:46.236097  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:46.237857  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:46.237876  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:46.237883  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:46.237887  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:46.237891  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:46 GMT
	I0317 10:51:46.237895  121672 round_trippers.go:587]     Audit-Id: d33e7840-9e40-4c2b-8d5b-03d8331fe862
	I0317 10:51:46.237899  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:46.237902  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:46.238093  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:46.733921  121672 type.go:168] "Request Body" body=""
	I0317 10:51:46.733992  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:46.733999  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:46.734008  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:46.734024  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:46.736306  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:46.736328  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:46.736340  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:46.736344  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:46.736348  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:46.736352  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:46 GMT
	I0317 10:51:46.736354  121672 round_trippers.go:587]     Audit-Id: e8419353-43a9-406d-8717-17681d467d66
	I0317 10:51:46.736358  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:46.736614  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:46.736778  121672 type.go:168] "Request Body" body=""
	I0317 10:51:46.736826  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:46.736835  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:46.736843  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:46.736849  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:46.738858  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:46.738899  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:46.738908  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:46.738913  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:46.738918  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:46 GMT
	I0317 10:51:46.738921  121672 round_trippers.go:587]     Audit-Id: 079158b7-aeeb-4ac9-8cfd-07b24e667dc3
	I0317 10:51:46.738926  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:46.738928  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:46.739300  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:47.233079  121672 type.go:168] "Request Body" body=""
	I0317 10:51:47.233179  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:47.233192  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:47.233201  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:47.233205  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:47.235576  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:47.235600  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:47.235610  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:47.235614  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:47.235623  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:47.235627  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:47.235631  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:47 GMT
	I0317 10:51:47.235634  121672 round_trippers.go:587]     Audit-Id: e29121d2-c1a5-4f81-a007-3a4df2fdacb8
	I0317 10:51:47.236131  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:47.236281  121672 type.go:168] "Request Body" body=""
	I0317 10:51:47.236326  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:47.236336  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:47.236343  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:47.236349  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:47.238303  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:47.238328  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:47.238336  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:47 GMT
	I0317 10:51:47.238340  121672 round_trippers.go:587]     Audit-Id: c210ad83-0ee1-4bfd-a9b1-b58202931982
	I0317 10:51:47.238343  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:47.238346  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:47.238349  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:47.238351  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:47.238719  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:47.238817  121672 pod_ready.go:103] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"False"
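	The pod_ready lines record minikube's wait loop, which re-GETs the pod roughly every 500ms (visible in the timestamps above) until the PodReady condition turns True. A compact client-go sketch of the same check; the helper name, poll interval, and timeout are illustrative assumptions, not minikube's actual code:

		package main

		import (
			"context"
			"time"

			corev1 "k8s.io/api/core/v1"
			metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
			"k8s.io/apimachinery/pkg/util/wait"
			"k8s.io/client-go/kubernetes"
		)

		// waitPodReady polls the named pod until its PodReady condition reports
		// True, mirroring the GET-every-500ms pattern in the log above.
		func waitPodReady(ctx context.Context, cs kubernetes.Interface, ns, name string, timeout time.Duration) error {
			return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, timeout, true,
				func(ctx context.Context) (bool, error) {
					pod, err := cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
					if err != nil {
						return false, err // surface API errors to the caller
					}
					for _, cond := range pod.Status.Conditions {
						if cond.Type == corev1.PodReady {
							return cond.Status == corev1.ConditionTrue, nil
						}
					}
					return false, nil // condition not posted yet; keep polling
				})
		}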
	I0317 10:51:47.733172  121672 type.go:168] "Request Body" body=""
	I0317 10:51:47.733242  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:47.733252  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:47.733261  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:47.733269  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:47.735508  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:47.735532  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:47.735540  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:47.735546  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:47.735549  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:47.735569  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:47.735578  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:47 GMT
	I0317 10:51:47.735581  121672 round_trippers.go:587]     Audit-Id: eb003e11-22d1-4f74-aae4-aec9b3baa7b1
	I0317 10:51:47.735885  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:47.736028  121672 type.go:168] "Request Body" body=""
	I0317 10:51:47.736071  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:47.736081  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:47.736090  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:47.736094  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:47.737860  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:47.737882  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:47.737889  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:47.737893  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:47.737896  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:47 GMT
	I0317 10:51:47.737899  121672 round_trippers.go:587]     Audit-Id: 39b752bf-9f73-45f0-bb58-6ffe18ecb070
	I0317 10:51:47.737904  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:47.737907  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:47.738091  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:48.233911  121672 type.go:168] "Request Body" body=""
	I0317 10:51:48.233999  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:48.234012  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:48.234021  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:48.234028  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:48.236441  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:48.236470  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:48.236479  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:48.236485  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:48.236488  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:48.236492  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:48.236495  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:48 GMT
	I0317 10:51:48.236498  121672 round_trippers.go:587]     Audit-Id: b598c863-0ce3-4f21-a0db-e119769a0b6f
	I0317 10:51:48.236698  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:48.236838  121672 type.go:168] "Request Body" body=""
	I0317 10:51:48.236882  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:48.236892  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:48.236900  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:48.236908  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:48.238811  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:48.238835  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:48.238844  121672 round_trippers.go:587]     Audit-Id: de0ae629-84f1-44d5-8670-2458aba751ba
	I0317 10:51:48.238849  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:48.238852  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:48.238856  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:48.238859  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:48.238862  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:48 GMT
	I0317 10:51:48.239123  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:48.733940  121672 type.go:168] "Request Body" body=""
	I0317 10:51:48.734008  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:48.734018  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:48.734028  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:48.734038  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:48.736271  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:48.736294  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:48.736303  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:48.736307  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:48.736310  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:48.736315  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:48.736318  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:48 GMT
	I0317 10:51:48.736324  121672 round_trippers.go:587]     Audit-Id: af9e35d4-8aba-45ac-8030-b50f7f8f74ab
	I0317 10:51:48.736534  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:48.736670  121672 type.go:168] "Request Body" body=""
	I0317 10:51:48.736714  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:48.736725  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:48.736733  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:48.736739  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:48.738536  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:48.738553  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:48.738561  121672 round_trippers.go:587]     Audit-Id: 19788cdc-ab17-4a3f-ab5f-af20cd891b1f
	I0317 10:51:48.738564  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:48.738567  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:48.738571  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:48.738574  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:48.738578  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:48 GMT
	I0317 10:51:48.738768  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.233159  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.233233  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:49.233243  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.233251  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.233257  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.235613  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:49.235636  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.235645  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.235650  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.235657  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.235660  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.235664  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.235668  121672 round_trippers.go:587]     Audit-Id: 20061aaa-47ac-46d9-952a-228eee8b4c65
	I0317 10:51:49.235954  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  de 24 0a 94 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.$.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 33 37 35 38 00  |33b09788f2.3758.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22348 chars]
	 >
	I0317 10:51:49.236126  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.236169  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.236179  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.236187  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.236192  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.238220  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:49.238247  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.238256  121672 round_trippers.go:587]     Audit-Id: 4eb3b799-a6c9-4ba0-aabc-aca61cc7deb0
	I0317 10:51:49.238263  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.238269  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.238276  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.238279  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.238305  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.238644  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.733188  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.733262  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:51:49.733276  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.733284  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.733296  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.735535  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:49.735563  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.735571  121672 round_trippers.go:587]     Audit-Id: 84e86c10-dd95-4b2d-bc5b-034fa3b9595d
	I0317 10:51:49.735575  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.735580  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.735588  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.735592  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.735595  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.735876  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  c9 25 0a b7 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.%.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 34 34 38 38 00  |33b09788f2.4488.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22833 chars]
	 >
	I0317 10:51:49.736022  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.736079  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.736088  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.736096  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.736101  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.738064  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.738083  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.738090  121672 round_trippers.go:587]     Audit-Id: c49d6025-bb22-47b7-8396-3780ae095109
	I0317 10:51:49.738094  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.738097  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.738100  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.738105  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.738109  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.738415  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.738515  121672 pod_ready.go:93] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:49.738536  121672 pod_ready.go:82] duration metric: took 16.005581898s for pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.738546  121672 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-cvx2q" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.738580  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.738618  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-cvx2q
	I0317 10:51:49.738623  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.738630  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.738634  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.740449  121672 round_trippers.go:581] Response Status: 404 Not Found in 1 milliseconds
	I0317 10:51:49.740466  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.740473  121672 round_trippers.go:587]     Audit-Id: 1f71e471-15fd-45b1-9014-1dcb5a25e23a
	I0317 10:51:49.740478  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.740481  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.740486  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.740492  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.740496  121672 round_trippers.go:587]     Content-Length: 137
	I0317 10:51:49.740498  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.740532  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0c 0a 02  76 31 12 06 53 74 61 74  |k8s.....v1..Stat|
		00000010  75 73 12 71 0a 06 0a 00  12 00 1a 00 12 07 46 61  |us.q..........Fa|
		00000020  69 6c 75 72 65 1a 29 70  6f 64 73 20 22 63 6f 72  |ilure.)pods "cor|
		00000030  65 64 6e 73 2d 36 36 38  64 36 62 66 39 62 63 2d  |edns-668d6bf9bc-|
		00000040  63 76 78 32 71 22 20 6e  6f 74 20 66 6f 75 6e 64  |cvx2q" not found|
		00000050  22 08 4e 6f 74 46 6f 75  6e 64 2a 26 0a 18 63 6f  |".NotFound*&..co|
		00000060  72 65 64 6e 73 2d 36 36  38 64 36 62 66 39 62 63  |redns-668d6bf9bc|
		00000070  2d 63 76 78 32 71 12 00  1a 04 70 6f 64 73 28 00  |-cvx2q....pods(.|
		00000080  32 00 30 94 03 1a 00 22  00                       |2.0....".|
	 >
	I0317 10:51:49.740582  121672 pod_ready.go:98] error getting pod "coredns-668d6bf9bc-cvx2q" in "kube-system" namespace (skipping!): pods "coredns-668d6bf9bc-cvx2q" not found
	I0317 10:51:49.740594  121672 pod_ready.go:82] duration metric: took 2.04229ms for pod "coredns-668d6bf9bc-cvx2q" in "kube-system" namespace to be "Ready" ...
	E0317 10:51:49.740604  121672 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-668d6bf9bc-cvx2q" in "kube-system" namespace (skipping!): pods "coredns-668d6bf9bc-cvx2q" not found
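	The 404 here is benign: coredns-668d6bf9bc-cvx2q is the replica that was scaled away, so the waiter logs the NotFound (the Status body above carries reason "NotFound" and code 404) and skips the pod instead of failing. With client-go the same distinction is a one-liner via apierrors.IsNotFound; a small sketch with a hypothetical helper name, not minikube's actual function:

		package main

		import (
			"context"

			apierrors "k8s.io/apimachinery/pkg/api/errors"
			metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
			"k8s.io/client-go/kubernetes"
		)

		// skipIfGone treats a deleted pod as "nothing to wait for", matching
		// the WaitExtra behaviour logged above (hypothetical helper).
		func skipIfGone(ctx context.Context, cs kubernetes.Interface, ns, name string) (skip bool, err error) {
			_, err = cs.CoreV1().Pods(ns).Get(ctx, name, metav1.GetOptions{})
			if apierrors.IsNotFound(err) {
				return true, nil // pod no longer exists; skip rather than fail
			}
			return false, err
		}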
	I0317 10:51:49.740611  121672 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.740640  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.740672  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-286863
	I0317 10:51:49.740677  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.740683  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.740687  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.742507  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.742526  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.742534  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.742540  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.742544  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.742548  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.742552  121672 round_trippers.go:587]     Audit-Id: 7929c0e1-e1a0-4df1-a2f8-99cb3ced38b7
	I0317 10:51:49.742558  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.742947  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  87 2b 0a 9a 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 32 38  36 38 36 33 12 00 1a 0b  |inode-286863....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 31  |kube-system".*$1|
		00000040  63 64 66 38 33 30 63 2d  30 64 34 31 2d 34 63 65  |cdf830c-0d41-4ce|
		00000050  37 2d 39 66 62 62 2d 31  35 37 34 66 62 30 65 38  |7-9fbb-1574fb0e8|
		00000060  33 64 66 32 03 34 32 38  38 00 42 08 08 af fd df  |3df2.4288.B.....|
		00000070  be 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4d  |.control-planebM|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26307 chars]
	 >
	I0317 10:51:49.743168  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.743208  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.743218  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.743226  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.743231  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.745104  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.745121  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.745130  121672 round_trippers.go:587]     Audit-Id: c79465d5-10e7-4778-826b-b11e32cc8878
	I0317 10:51:49.745136  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.745139  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.745141  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.745144  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.745146  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.745497  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.745610  121672 pod_ready.go:93] pod "etcd-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:49.745628  121672 pod_ready.go:82] duration metric: took 5.010959ms for pod "etcd-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.745641  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.745676  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.745718  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-286863
	I0317 10:51:49.745726  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.745734  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.745739  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.747663  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.747683  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.747691  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.747695  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.747700  121672 round_trippers.go:587]     Audit-Id: 99982b23-af98-4445-b3fa-2c930ec90fa7
	I0317 10:51:49.747703  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.747708  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.747714  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.747992  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  bd 39 0a dc 1f 0a 1f 6b  75 62 65 2d 61 70 69 73  |.9.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  32 38 36 38 36 33 12 00  1a 0b 6b 75 62 65 2d 73  |286863....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 62 34 35 37 35 33 33  |ystem".*$b457533|
		00000050  37 2d 37 39 31 37 2d 34  66 32 33 2d 62 31 65 32  |7-7917-4f23-b1e2|
		00000060  2d 63 36 33 31 37 33 39  31 34 35 62 65 32 03 34  |-c631739145be2.4|
		00000070  33 31 38 00 42 08 08 af  fd df be 06 10 00 5a 1b  |318.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 54 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebT.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 35461 chars]
	 >
	I0317 10:51:49.748119  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.748160  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.748170  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.748177  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.748181  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.749997  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.750029  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.750038  121672 round_trippers.go:587]     Audit-Id: d076879f-ee41-4032-b38c-85805fd92893
	I0317 10:51:49.750044  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.750048  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.750050  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.750053  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.750056  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.750284  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.750380  121672 pod_ready.go:93] pod "kube-apiserver-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:49.750397  121672 pod_ready.go:82] duration metric: took 4.747817ms for pod "kube-apiserver-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.750408  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.750464  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.750503  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-286863
	I0317 10:51:49.750512  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.750519  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.750522  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.752394  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.752414  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.752421  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.752427  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.752440  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.752447  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.752451  121672 round_trippers.go:587]     Audit-Id: 1edb54b9-5216-4d29-a01e-c8cd3e8e9239
	I0317 10:51:49.752460  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.752698  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a7 36 0a ca 20 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.6.. .(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 32 38 36 38 36 33 12  |ultinode-286863.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 33 62 38 39 66 36  39 36 2d 32 33 66 32 2d  |*$3b89f696-23f2-|
		00000060  34 64 34 35 2d 39 65 30  38 2d 65 33 32 30 32 38  |4d45-9e08-e32028|
		00000070  63 37 39 32 30 65 32 03  33 38 33 38 00 42 08 08  |c7920e2.3838.B..|
		00000080  af fd df be 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 33417 chars]
	 >
	I0317 10:51:49.752850  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.752889  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.752900  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.752908  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.752912  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.754615  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.754635  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.754644  121672 round_trippers.go:587]     Audit-Id: df05ec8a-923e-4a89-9ae5-c8231f21d426
	I0317 10:51:49.754647  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.754651  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.754654  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.754658  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.754662  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.755089  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.755190  121672 pod_ready.go:93] pod "kube-controller-manager-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:49.755208  121672 pod_ready.go:82] duration metric: took 4.792741ms for pod "kube-controller-manager-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.755219  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-9xbpl" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.755254  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.755298  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-9xbpl
	I0317 10:51:49.755308  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.755316  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.755321  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.757089  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:51:49.757104  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.757111  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.757115  121672 round_trippers.go:587]     Audit-Id: 9288a6ba-4c41-43f6-92f8-ea37dfa35473
	I0317 10:51:49.757119  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.757130  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.757146  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.757151  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.757446  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  8a 25 0a be 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 39 78 62 70 6c 12  0b 6b 75 62 65 2d 70 72  |y-9xbpl..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 66 61  32 35 36 33 63 2d 30 36  |m".*$afa2563c-06|
		00000050  37 62 2d 34 65 31 32 2d  62 38 66 34 2d 38 64 62  |7b-4e12-b8f4-8db|
		00000060  37 33 38 61 38 38 30 63  63 32 03 33 38 34 38 00  |738a880cc2.3848.|
		00000070  42 08 08 b4 fd df be 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22518 chars]
	 >
	I0317 10:51:49.757566  121672 type.go:168] "Request Body" body=""
	I0317 10:51:49.933938  121672 request.go:661] Waited for 176.306585ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.933992  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:49.933998  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:49.934007  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:49.934013  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:49.936252  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:49.936306  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:49.936350  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:49.936373  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:49 GMT
	I0317 10:51:49.936395  121672 round_trippers.go:587]     Audit-Id: dbfd4175-4d33-4307-868d-9c79a34b2247
	I0317 10:51:49.936435  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:49.936448  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:49.936452  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:49.936724  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:49.936824  121672 pod_ready.go:93] pod "kube-proxy-9xbpl" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:49.936839  121672 pod_ready.go:82] duration metric: took 181.60896ms for pod "kube-proxy-9xbpl" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.936851  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:49.936892  121672 type.go:168] "Request Body" body=""
	I0317 10:51:50.134326  121672 request.go:661] Waited for 197.366426ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-286863
	I0317 10:51:50.134443  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-286863
	I0317 10:51:50.134504  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.134513  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.134519  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:50.137087  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:50.137122  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.137131  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.137139  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.137145  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.137148  121672 round_trippers.go:587]     Audit-Id: 5a67427e-36be-4310-b9d3-c24a81ad75eb
	I0317 10:51:50.137151  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.137154  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:50.137358  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  e3 22 0a 80 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  32 38 36 38 36 33 12 00  1a 0b 6b 75 62 65 2d 73  |286863....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 32 34 34 35 30 65 65  |ystem".*$24450ee|
		00000050  66 2d 61 66 65 66 2d 34  34 66 37 2d 61 37 32 39  |f-afef-44f7-a729|
		00000060  2d 32 31 39 30 63 34 64  35 65 34 35 34 32 03 34  |-2190c4d5e4542.4|
		00000070  33 34 38 00 42 08 08 af  fd df be 06 10 00 5a 1b  |348.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21089 chars]
	 >
	I0317 10:51:50.137505  121672 type.go:168] "Request Body" body=""
	I0317 10:51:50.333926  121672 request.go:661] Waited for 196.362295ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:50.334003  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:51:50.334009  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.334017  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:50.334028  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.336446  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:50.336543  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.336563  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.336568  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.336572  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.336597  121672 round_trippers.go:587]     Audit-Id: 8bc8b60d-2d6a-429e-bc24-8d48d0cd1d11
	I0317 10:51:50.336619  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.336629  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:50.336834  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 f6 23 0a 82 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..#.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 32  37 38 00 42 08 08 ac fd  |5f582.4278.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 21804 chars]
	 >
	I0317 10:51:50.336964  121672 pod_ready.go:93] pod "kube-scheduler-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:51:50.336983  121672 pod_ready.go:82] duration metric: took 400.119867ms for pod "kube-scheduler-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:51:50.336992  121672 pod_ready.go:39] duration metric: took 16.61115758s of extra waiting for all system-critical pods and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
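
The block above is the tail of minikube's pod_ready gate: each system-critical pod is fetched individually and its Ready condition inspected (the interleaved GET /nodes calls suggest the gate re-checks the pod's node as well). A minimal client-go sketch of the per-pod check, assuming a kubeconfig at the default path; the pod name is copied from the log purely for illustration:

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	// Build a client from the default kubeconfig (assumption: it points
	// at the cluster under test).
	cfg, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
	must(err)
	cs, err := kubernetes.NewForConfig(cfg)
	must(err)
	// One GET per pod, as in the log; "Ready" means the PodReady
	// condition is True, which is what pod_ready.go:93 reports above.
	pod, err := cs.CoreV1().Pods("kube-system").Get(context.Background(), "kube-proxy-9xbpl", metav1.GetOptions{})
	must(err)
	for _, c := range pod.Status.Conditions {
		if c.Type == corev1.PodReady {
			fmt.Printf("pod %s Ready=%v\n", pod.Name, c.Status == corev1.ConditionTrue)
		}
	}
}
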
	I0317 10:51:50.337026  121672 api_server.go:52] waiting for apiserver process to appear ...
	I0317 10:51:50.337094  121672 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0317 10:51:50.347041  121672 command_runner.go:130] > 1436
	I0317 10:51:50.348176  121672 api_server.go:72] duration metric: took 17.652940252s to wait for apiserver process to appear ...
	I0317 10:51:50.348199  121672 api_server.go:88] waiting for apiserver healthz status ...
	I0317 10:51:50.348218  121672 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0317 10:51:50.355716  121672 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
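
Once the pods are Ready, api_server.go waits for a 200 from /healthz before trusting the control plane. A sketch of the same probe through client-go's authenticated REST client, under the same kubeconfig assumption as the previous sketch:

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
	must(err)
	cs, err := kubernetes.NewForConfig(cfg)
	must(err)
	// GET /healthz through the authenticated REST client; a healthy
	// apiserver answers with the literal body "ok", as logged above.
	body, err := cs.Discovery().RESTClient().Get().AbsPath("/healthz").DoRaw(context.Background())
	must(err)
	fmt.Printf("/healthz => %s\n", body)
}
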
	I0317 10:51:50.355785  121672 discovery_client.go:658] "Request Body" body=""
	I0317 10:51:50.355838  121672 round_trippers.go:470] GET https://192.168.67.2:8443/version
	I0317 10:51:50.355848  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.355863  121672 round_trippers.go:480]     Accept: application/json, */*
	I0317 10:51:50.355866  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.356664  121672 round_trippers.go:581] Response Status: 200 OK in 0 milliseconds
	I0317 10:51:50.356682  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.356691  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.356696  121672 round_trippers.go:587]     Content-Length: 263
	I0317 10:51:50.356699  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.356703  121672 round_trippers.go:587]     Audit-Id: 1c8e3f4d-eb68-4e83-beb4-87b5c8f66b92
	I0317 10:51:50.356707  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.356710  121672 round_trippers.go:587]     Content-Type: application/json
	I0317 10:51:50.356713  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.356774  121672 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/arm64"
		}
	 >
	I0317 10:51:50.356860  121672 api_server.go:141] control plane version: v1.32.2
	I0317 10:51:50.356876  121672 api_server.go:131] duration metric: took 8.671453ms to wait for apiserver health ...
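
The /version round trip logged by discovery_client above is exposed in client-go as ServerVersion(); the JSON body maps directly onto version.Info. A sketch under the same kubeconfig assumption:

package main

import (
	"fmt"
	"os"
	"path/filepath"

	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
	must(err)
	cs, err := kubernetes.NewForConfig(cfg)
	must(err)
	// ServerVersion performs GET /version; GitVersion is the value the
	// log reports as "control plane version".
	info, err := cs.Discovery().ServerVersion()
	must(err)
	fmt.Printf("control plane version: %s (%s, %s)\n", info.GitVersion, info.GoVersion, info.Platform)
}
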
	I0317 10:51:50.356884  121672 system_pods.go:43] waiting for kube-system pods to appear ...
	I0317 10:51:50.356921  121672 type.go:204] "Request Body" body=""
	I0317 10:51:50.534303  121672 request.go:661] Waited for 177.319127ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:51:50.534359  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:51:50.534365  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.534374  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:50.534379  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.537189  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:50.537215  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.537224  121672 round_trippers.go:587]     Audit-Id: ede29147-c8da-4f97-98b8-cfd3ae47fa18
	I0317 10:51:50.537229  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.537237  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:50.537242  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.537245  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.537252  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.538330  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 9e cd 02 0a  09 0a 00 12 03 34 35 32  |ist..........452|
		00000020  1a 00 12 c9 25 0a b7 17  0a 18 63 6f 72 65 64 6e  |....%.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 62 36 6d  |s-668d6bf9bc-b6m|
		00000040  68 39 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |h9..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  63 30 64 66 63 30 37 62  |stem".*$c0dfc07b|
		00000070  2d 63 30 36 64 2d 34 38  37 34 2d 38 62 61 34 2d  |-c06d-4874-8ba4-|
		00000080  65 38 30 33 33 62 30 39  37 38 38 66 32 03 34 34  |e8033b09788f2.44|
		00000090  38 38 00 42 08 08 b4 fd  df be 06 10 00 5a 13 0a  |88.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 209741 chars]
	 >
	I0317 10:51:50.538664  121672 system_pods.go:59] 8 kube-system pods found
	I0317 10:51:50.538693  121672 system_pods.go:61] "coredns-668d6bf9bc-b6mh9" [c0dfc07b-c06d-4874-8ba4-e8033b09788f] Running
	I0317 10:51:50.538698  121672 system_pods.go:61] "etcd-multinode-286863" [1cdf830c-0d41-4ce7-9fbb-1574fb0e83df] Running
	I0317 10:51:50.538708  121672 system_pods.go:61] "kindnet-krz7g" [8becc413-8372-4406-b7a9-6e06891cb54a] Running
	I0317 10:51:50.538712  121672 system_pods.go:61] "kube-apiserver-multinode-286863" [b4575337-7917-4f23-b1e2-c631739145be] Running
	I0317 10:51:50.538720  121672 system_pods.go:61] "kube-controller-manager-multinode-286863" [3b89f696-23f2-4d45-9e08-e32028c7920e] Running
	I0317 10:51:50.538724  121672 system_pods.go:61] "kube-proxy-9xbpl" [afa2563c-067b-4e12-b8f4-8db738a880cc] Running
	I0317 10:51:50.538734  121672 system_pods.go:61] "kube-scheduler-multinode-286863" [24450eef-afef-44f7-a729-2190c4d5e454] Running
	I0317 10:51:50.538738  121672 system_pods.go:61] "storage-provisioner" [c3aadd96-8f0b-4481-af0f-a6d8a264b0ef] Running
	I0317 10:51:50.538743  121672 system_pods.go:74] duration metric: took 181.854133ms to wait for pod list to return data ...
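
The system_pods gate, by contrast, is a single LIST of the kube-system namespace with each item's phase checked, which is why one protobuf response (~210 KB, truncated above) covers all eight pods. A sketch, same assumptions as before:

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
	must(err)
	cs, err := kubernetes.NewForConfig(cfg)
	must(err)
	// One LIST for the whole namespace, then a phase check per item.
	pods, err := cs.CoreV1().Pods("kube-system").List(context.Background(), metav1.ListOptions{})
	must(err)
	running := 0
	for _, p := range pods.Items {
		if p.Status.Phase == corev1.PodRunning {
			running++
		}
	}
	fmt.Printf("%d kube-system pods found, %d Running\n", len(pods.Items), running)
}
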
	I0317 10:51:50.538751  121672 default_sa.go:34] waiting for default service account to be created ...
	I0317 10:51:50.538799  121672 type.go:204] "Request Body" body=""
	I0317 10:51:50.734247  121672 request.go:661] Waited for 195.399887ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0317 10:51:50.734305  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/default/serviceaccounts
	I0317 10:51:50.734311  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.734320  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.734328  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:50.737044  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:50.737075  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.737084  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.737090  121672 round_trippers.go:587]     Content-Length: 128
	I0317 10:51:50.737093  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.737096  121672 round_trippers.go:587]     Audit-Id: 7b326109-7334-41c5-ba28-9099f07b4bf7
	I0317 10:51:50.737099  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.737102  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:50.737105  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.737163  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 35  33 1a 00 12 4f 0a 4d 0a  |......453...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 39  38 30 33 34 66 65 63 2d  |ult".*$98034fec-|
		00000050  32 61 61 38 2d 34 34 35  39 2d 38 34 63 39 2d 35  |2aa8-4459-84c9-5|
		00000060  64 64 65 62 36 39 62 66  38 30 64 32 03 33 33 33  |ddeb69bf80d2.333|
		00000070  38 00 42 08 08 b4 fd df  be 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0317 10:51:50.737215  121672 default_sa.go:45] found service account: "default"
	I0317 10:51:50.737232  121672 default_sa.go:55] duration metric: took 198.47282ms for default service account to be created ...
	I0317 10:51:50.737240  121672 system_pods.go:116] waiting for k8s-apps to be running ...
	I0317 10:51:50.737276  121672 type.go:204] "Request Body" body=""
	I0317 10:51:50.933658  121672 request.go:661] Waited for 196.342439ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:51:50.933714  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:51:50.933754  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:50.933766  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:50.933770  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:50.936627  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:50.936653  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:50.936662  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:50.936665  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:50 GMT
	I0317 10:51:50.936700  121672 round_trippers.go:587]     Audit-Id: 37cbecde-3e65-47d6-92f6-8fe2ff7f2652
	I0317 10:51:50.936711  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:50.936715  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:50.936718  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:50.937877  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 9e cd 02 0a  09 0a 00 12 03 34 35 33  |ist..........453|
		00000020  1a 00 12 c9 25 0a b7 17  0a 18 63 6f 72 65 64 6e  |....%.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 62 36 6d  |s-668d6bf9bc-b6m|
		00000040  68 39 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |h9..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  63 30 64 66 63 30 37 62  |stem".*$c0dfc07b|
		00000070  2d 63 30 36 64 2d 34 38  37 34 2d 38 62 61 34 2d  |-c06d-4874-8ba4-|
		00000080  65 38 30 33 33 62 30 39  37 38 38 66 32 03 34 34  |e8033b09788f2.44|
		00000090  38 38 00 42 08 08 b4 fd  df be 06 10 00 5a 13 0a  |88.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 209741 chars]
	 >
	I0317 10:51:50.938232  121672 system_pods.go:86] 8 kube-system pods found
	I0317 10:51:50.938256  121672 system_pods.go:89] "coredns-668d6bf9bc-b6mh9" [c0dfc07b-c06d-4874-8ba4-e8033b09788f] Running
	I0317 10:51:50.938263  121672 system_pods.go:89] "etcd-multinode-286863" [1cdf830c-0d41-4ce7-9fbb-1574fb0e83df] Running
	I0317 10:51:50.938268  121672 system_pods.go:89] "kindnet-krz7g" [8becc413-8372-4406-b7a9-6e06891cb54a] Running
	I0317 10:51:50.938277  121672 system_pods.go:89] "kube-apiserver-multinode-286863" [b4575337-7917-4f23-b1e2-c631739145be] Running
	I0317 10:51:50.938285  121672 system_pods.go:89] "kube-controller-manager-multinode-286863" [3b89f696-23f2-4d45-9e08-e32028c7920e] Running
	I0317 10:51:50.938292  121672 system_pods.go:89] "kube-proxy-9xbpl" [afa2563c-067b-4e12-b8f4-8db738a880cc] Running
	I0317 10:51:50.938296  121672 system_pods.go:89] "kube-scheduler-multinode-286863" [24450eef-afef-44f7-a729-2190c4d5e454] Running
	I0317 10:51:50.938300  121672 system_pods.go:89] "storage-provisioner" [c3aadd96-8f0b-4481-af0f-a6d8a264b0ef] Running
	I0317 10:51:50.938308  121672 system_pods.go:126] duration metric: took 201.062178ms to wait for k8s-apps to be running ...
	I0317 10:51:50.938318  121672 system_svc.go:44] waiting for kubelet service to be running ...
	I0317 10:51:50.938374  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 10:51:50.949833  121672 system_svc.go:56] duration metric: took 11.50563ms for WaitForService to wait for kubelet
	I0317 10:51:50.949863  121672 kubeadm.go:582] duration metric: took 18.254629174s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
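
The kubelet check above relies purely on systemctl's exit status, which is why no command output appears in the log. A simplified local sketch (the logged invocation additionally uses sudo and runs over SSH inside the node container, both assumed available non-interactively on the CI host):

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// `systemctl is-active --quiet <unit>` exits 0 iff the unit is
	// active, so the exit code alone answers "is kubelet running".
	err := exec.Command("systemctl", "is-active", "--quiet", "kubelet").Run()
	fmt.Println("kubelet active:", err == nil)
}
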
	I0317 10:51:50.949932  121672 node_conditions.go:102] verifying NodePressure condition ...
	I0317 10:51:50.949997  121672 type.go:204] "Request Body" body=""
	I0317 10:51:51.133325  121672 request.go:661] Waited for 183.263784ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0317 10:51:51.133387  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes
	I0317 10:51:51.133399  121672 round_trippers.go:476] Request Headers:
	I0317 10:51:51.133408  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:51:51.133412  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:51:51.136079  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:51:51.136104  121672 round_trippers.go:584] Response Headers:
	I0317 10:51:51.136124  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:51:51.136130  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:51:51.136133  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:51:51.136135  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:51:51.136138  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:51:51 GMT
	I0317 10:51:51.136141  121672 round_trippers.go:587]     Audit-Id: 63057afd-9916-456c-b017-fb40b10871c4
	I0317 10:51:51.136370  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 84 24 0a  09 0a 00 12 03 34 35 33  |List..$......453|
		00000020  1a 00 12 f6 23 0a 82 11  0a 10 6d 75 6c 74 69 6e  |....#.....multin|
		00000030  6f 64 65 2d 32 38 36 38  36 33 12 00 1a 00 22 00  |ode-286863....".|
		00000040  2a 24 65 39 39 33 61 62  63 64 2d 35 37 35 33 2d  |*$e993abcd-5753-|
		00000050  34 62 62 64 2d 38 61 30  35 2d 35 38 38 36 37 61  |4bbd-8a05-58867a|
		00000060  30 61 35 66 35 38 32 03  34 32 37 38 00 42 08 08  |0a5f582.4278.B..|
		00000070  ac fd df be 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 72 6d 36  34 5a 1e 0a 15 62 65 74  |ch..arm64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21885 chars]
	 >
	I0317 10:51:51.136489  121672 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0317 10:51:51.136514  121672 node_conditions.go:123] node cpu capacity is 2
	I0317 10:51:51.136527  121672 node_conditions.go:105] duration metric: took 186.589167ms to run NodePressure ...
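
NodePressure verification reads capacity straight off each Node object; the two figures logged above (203034800Ki ephemeral storage, 2 CPUs) live in Status.Capacity. A sketch, same kubeconfig assumption:

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", filepath.Join(os.Getenv("HOME"), ".kube", "config"))
	must(err)
	cs, err := kubernetes.NewForConfig(cfg)
	must(err)
	nodes, err := cs.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
	must(err)
	// Capacity is a ResourceList (map of resource.Quantity values).
	for _, n := range nodes.Items {
		cpu := n.Status.Capacity[corev1.ResourceCPU]
		eph := n.Status.Capacity[corev1.ResourceEphemeralStorage]
		fmt.Printf("%s: cpu=%s ephemeral-storage=%s\n", n.Name, cpu.String(), eph.String())
	}
}
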
	I0317 10:51:51.136550  121672 start.go:241] waiting for startup goroutines ...
	I0317 10:51:51.136557  121672 start.go:246] waiting for cluster config update ...
	I0317 10:51:51.136573  121672 start.go:255] writing updated cluster config ...
	I0317 10:51:51.140744  121672 out.go:201] 
	I0317 10:51:51.144131  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:51:51.144235  121672 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json ...
	I0317 10:51:51.147611  121672 out.go:177] * Starting "multinode-286863-m02" worker node in "multinode-286863" cluster
	I0317 10:51:51.151237  121672 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0317 10:51:51.154296  121672 out.go:177] * Pulling base image v0.0.46-1741860993-20523 ...
	I0317 10:51:51.157162  121672 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:51:51.157200  121672 cache.go:56] Caching tarball of preloaded images
	I0317 10:51:51.157243  121672 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon
	I0317 10:51:51.157305  121672 preload.go:172] Found /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0317 10:51:51.157316  121672 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on containerd
	I0317 10:51:51.157421  121672 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json ...
	I0317 10:51:51.183023  121672 image.go:100] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon, skipping pull
	I0317 10:51:51.183051  121672 cache.go:145] gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 exists in daemon, skipping load
	I0317 10:51:51.183072  121672 cache.go:230] Successfully downloaded all kic artifacts
	I0317 10:51:51.183096  121672 start.go:360] acquireMachinesLock for multinode-286863-m02: {Name:mka7d0bb0f5b0f3ccba74be1ccc8b70d6051f65c Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0317 10:51:51.183230  121672 start.go:364] duration metric: took 110.574µs to acquireMachinesLock for "multinode-286863-m02"
	I0317 10:51:51.183264  121672 start.go:93] Provisioning new machine with config: &{Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0317 10:51:51.183340  121672 start.go:125] createHost starting for "m02" (driver="docker")
	I0317 10:51:51.186854  121672 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0317 10:51:51.187003  121672 start.go:159] libmachine.API.Create for "multinode-286863" (driver="docker")
	I0317 10:51:51.187047  121672 client.go:168] LocalClient.Create starting
	I0317 10:51:51.187121  121672 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem
	I0317 10:51:51.187166  121672 main.go:141] libmachine: Decoding PEM data...
	I0317 10:51:51.187184  121672 main.go:141] libmachine: Parsing certificate...
	I0317 10:51:51.187238  121672 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem
	I0317 10:51:51.187260  121672 main.go:141] libmachine: Decoding PEM data...
	I0317 10:51:51.187273  121672 main.go:141] libmachine: Parsing certificate...
	I0317 10:51:51.187498  121672 cli_runner.go:164] Run: docker network inspect multinode-286863 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 10:51:51.203940  121672 network_create.go:77] Found existing network {name:multinode-286863 subnet:0x4002333a10 gateway:[0 0 0 0 0 0 0 0 0 0 255 255 192 168 67 1] mtu:1500}
	I0317 10:51:51.203987  121672 kic.go:121] calculated static IP "192.168.67.3" for the "multinode-286863-m02" container
	I0317 10:51:51.204062  121672 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0317 10:51:51.221130  121672 cli_runner.go:164] Run: docker volume create multinode-286863-m02 --label name.minikube.sigs.k8s.io=multinode-286863-m02 --label created_by.minikube.sigs.k8s.io=true
	I0317 10:51:51.240562  121672 oci.go:103] Successfully created a docker volume multinode-286863-m02
	I0317 10:51:51.240658  121672 cli_runner.go:164] Run: docker run --rm --name multinode-286863-m02-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-286863-m02 --entrypoint /usr/bin/test -v multinode-286863-m02:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -d /var/lib
	I0317 10:51:51.891384  121672 oci.go:107] Successfully prepared a docker volume multinode-286863-m02
	I0317 10:51:51.891425  121672 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:51:51.891446  121672 kic.go:194] Starting extracting preloaded images to volume ...
	I0317 10:51:51.891528  121672 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-286863-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir
	I0317 10:51:56.175642  121672 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v multinode-286863-m02:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir: (4.284068413s)
	I0317 10:51:56.175671  121672 kic.go:203] duration metric: took 4.284222407s to extract preloaded images to volume ...
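
The preload step seeds the new node's /var by extracting the image tarball into a named docker volume via a throwaway container, exactly the two `docker run` invocations above. A sketch with illustrative constants (the real tarball path and image digest appear in the log):

package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Illustrative names; substitute the real values from the log.
	const (
		tarball = "/path/to/preloaded-images.tar.lz4"
		volume  = "multinode-286863-m02"
		image   = "gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523"
	)
	// A throwaway container mounts the tarball read-only and the named
	// volume at /extractDir, then untars straight into the volume.
	cmd := exec.Command("docker", "run", "--rm",
		"--entrypoint", "/usr/bin/tar",
		"-v", tarball+":/preloaded.tar:ro",
		"-v", volume+":/extractDir",
		image, "-I", "lz4", "-xf", "/preloaded.tar", "-C", "/extractDir")
	out, err := cmd.CombinedOutput()
	fmt.Printf("%s", out)
	if err != nil {
		panic(err)
	}
}

Populating the volume before the node container starts is what lets the worker come up without re-pulling any images.
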
	W0317 10:51:56.175822  121672 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0317 10:51:56.175938  121672 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0317 10:51:56.229928  121672 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname multinode-286863-m02 --name multinode-286863-m02 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=multinode-286863-m02 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=multinode-286863-m02 --network multinode-286863 --ip 192.168.67.3 --volume multinode-286863-m02:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185
	I0317 10:51:56.534101  121672 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Running}}
	I0317 10:51:56.562234  121672 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Status}}
	I0317 10:51:56.595487  121672 cli_runner.go:164] Run: docker exec multinode-286863-m02 stat /var/lib/dpkg/alternatives/iptables
	I0317 10:51:56.651270  121672 oci.go:144] the created container "multinode-286863-m02" has a running status.
	I0317 10:51:56.651298  121672 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa...
	I0317 10:51:57.307365  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa.pub -> /home/docker/.ssh/authorized_keys
	I0317 10:51:57.307466  121672 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0317 10:51:57.330942  121672 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Status}}
	I0317 10:51:57.352639  121672 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0317 10:51:57.352658  121672 kic_runner.go:114] Args: [docker exec --privileged multinode-286863-m02 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0317 10:51:57.410026  121672 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Status}}
	I0317 10:51:57.436590  121672 machine.go:93] provisionDockerMachine start ...
	I0317 10:51:57.436684  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:57.476277  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:57.476596  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32913 <nil> <nil>}
	I0317 10:51:57.476611  121672 main.go:141] libmachine: About to run SSH command:
	hostname
	I0317 10:51:57.642262  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-286863-m02
	
	I0317 10:51:57.642353  121672 ubuntu.go:169] provisioning hostname "multinode-286863-m02"
	I0317 10:51:57.642450  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:57.665465  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:57.665777  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32913 <nil> <nil>}
	I0317 10:51:57.665791  121672 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-286863-m02 && echo "multinode-286863-m02" | sudo tee /etc/hostname
	I0317 10:51:57.819783  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-286863-m02
	
	I0317 10:51:57.819868  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:57.842543  121672 main.go:141] libmachine: Using SSH client type: native
	I0317 10:51:57.842838  121672 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 32913 <nil> <nil>}
	I0317 10:51:57.842856  121672 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-286863-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-286863-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-286863-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0317 10:51:57.974733  121672 main.go:141] libmachine: SSH cmd err, output: <nil>: 
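
The shell script above is an idempotent /etc/hosts fixup: rewrite an existing 127.0.1.1 entry if present, append one otherwise, and do nothing when the hostname is already mapped (hence the empty output on this run). A local Go equivalent, using a hypothetical file path so the sketch does not need root:

package main

import (
	"fmt"
	"os"
	"regexp"
	"strings"
)

func main() {
	const host = "multinode-286863-m02"
	const path = "hosts.copy" // stand-in for /etc/hosts
	data, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}
	text := string(data)
	// Already mapped: the script is a no-op on re-runs.
	if strings.Contains(text, " "+host) || strings.Contains(text, "\t"+host) {
		return
	}
	// Rewrite the 127.0.1.1 line if one exists, else append one.
	re := regexp.MustCompile(`(?m)^127\.0\.1\.1\s.*$`)
	if re.MatchString(text) {
		text = re.ReplaceAllString(text, "127.0.1.1 "+host)
	} else {
		text += fmt.Sprintf("127.0.1.1 %s\n", host)
	}
	if err := os.WriteFile(path, []byte(text), 0644); err != nil {
		panic(err)
	}
}
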
	I0317 10:51:57.974763  121672 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/20535-2262/.minikube CaCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/20535-2262/.minikube}
	I0317 10:51:57.974779  121672 ubuntu.go:177] setting up certificates
	I0317 10:51:57.974788  121672 provision.go:84] configureAuth start
	I0317 10:51:57.974846  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863-m02
	I0317 10:51:57.995184  121672 provision.go:143] copyHostCerts
	I0317 10:51:57.995232  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem
	I0317 10:51:57.995267  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem, removing ...
	I0317 10:51:57.995286  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem
	I0317 10:51:57.995373  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem (1123 bytes)
	I0317 10:51:57.995467  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem
	I0317 10:51:57.995494  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem, removing ...
	I0317 10:51:57.995502  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem
	I0317 10:51:57.995550  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem (1679 bytes)
	I0317 10:51:57.995663  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem -> /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem
	I0317 10:51:57.995695  121672 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem, removing ...
	I0317 10:51:57.995704  121672 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem
	I0317 10:51:57.995745  121672 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem (1078 bytes)
	I0317 10:51:57.995827  121672 provision.go:117] generating server cert: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem org=jenkins.multinode-286863-m02 san=[127.0.0.1 192.168.67.3 localhost minikube multinode-286863-m02]
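
provision.go:117 issues a server certificate whose SANs cover every name the node may be dialed by: 127.0.0.1, 192.168.67.3, localhost, minikube, multinode-286863-m02. A self-contained crypto/x509 sketch of issuing such a SAN certificate; a throwaway CA stands in for minikubeCA's real key material, which the actual flow loads from ca.pem/ca-key.pem:

package main

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func must(err error) {
	if err != nil {
		panic(err)
	}
}

func main() {
	// Throwaway CA standing in for minikubeCA.
	caKey, err := rsa.GenerateKey(rand.Reader, 2048)
	must(err)
	caTmpl := &x509.Certificate{
		SerialNumber:          big.NewInt(1),
		Subject:               pkix.Name{CommonName: "minikubeCA"},
		NotBefore:             time.Now(),
		NotAfter:              time.Now().AddDate(3, 0, 0), // ~26280h, as in the config dump
		IsCA:                  true,
		KeyUsage:              x509.KeyUsageCertSign,
		BasicConstraintsValid: true,
	}
	caDER, err := x509.CreateCertificate(rand.Reader, caTmpl, caTmpl, &caKey.PublicKey, caKey)
	must(err)
	caCert, err := x509.ParseCertificate(caDER)
	must(err)
	// Server certificate carrying the SAN list from the log line above.
	srvKey, err := rsa.GenerateKey(rand.Reader, 2048)
	must(err)
	srvTmpl := &x509.Certificate{
		SerialNumber: big.NewInt(2),
		Subject:      pkix.Name{Organization: []string{"jenkins.multinode-286863-m02"}},
		DNSNames:     []string{"localhost", "minikube", "multinode-286863-m02"},
		IPAddresses:  []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.67.3")},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().AddDate(3, 0, 0),
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
	}
	der, err := x509.CreateCertificate(rand.Reader, srvTmpl, caCert, &srvKey.PublicKey, caKey)
	must(err)
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}
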
	I0317 10:51:58.520440  121672 provision.go:177] copyRemoteCerts
	I0317 10:51:58.520508  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0317 10:51:58.520549  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:58.543373  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 10:51:58.635768  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem -> /etc/docker/server-key.pem
	I0317 10:51:58.635829  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0317 10:51:58.660155  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem -> /etc/docker/ca.pem
	I0317 10:51:58.660216  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0317 10:51:58.683815  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem -> /etc/docker/server.pem
	I0317 10:51:58.683918  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem --> /etc/docker/server.pem (1229 bytes)
	I0317 10:51:58.708346  121672 provision.go:87] duration metric: took 733.545224ms to configureAuth
	I0317 10:51:58.708376  121672 ubuntu.go:193] setting minikube options for container-runtime
	I0317 10:51:58.708644  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:51:58.708661  121672 machine.go:96] duration metric: took 1.272050526s to provisionDockerMachine
	I0317 10:51:58.708668  121672 client.go:171] duration metric: took 7.521610328s to LocalClient.Create
	I0317 10:51:58.708703  121672 start.go:167] duration metric: took 7.521701135s to libmachine.API.Create "multinode-286863"
	I0317 10:51:58.708718  121672 start.go:293] postStartSetup for "multinode-286863-m02" (driver="docker")
	I0317 10:51:58.708735  121672 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0317 10:51:58.708805  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0317 10:51:58.708866  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:58.727305  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 10:51:58.817714  121672 ssh_runner.go:195] Run: cat /etc/os-release
	I0317 10:51:58.820728  121672 command_runner.go:130] > PRETTY_NAME="Ubuntu 22.04.5 LTS"
	I0317 10:51:58.820749  121672 command_runner.go:130] > NAME="Ubuntu"
	I0317 10:51:58.820756  121672 command_runner.go:130] > VERSION_ID="22.04"
	I0317 10:51:58.820763  121672 command_runner.go:130] > VERSION="22.04.5 LTS (Jammy Jellyfish)"
	I0317 10:51:58.820768  121672 command_runner.go:130] > VERSION_CODENAME=jammy
	I0317 10:51:58.820771  121672 command_runner.go:130] > ID=ubuntu
	I0317 10:51:58.820775  121672 command_runner.go:130] > ID_LIKE=debian
	I0317 10:51:58.820779  121672 command_runner.go:130] > HOME_URL="https://www.ubuntu.com/"
	I0317 10:51:58.820784  121672 command_runner.go:130] > SUPPORT_URL="https://help.ubuntu.com/"
	I0317 10:51:58.820807  121672 command_runner.go:130] > BUG_REPORT_URL="https://bugs.launchpad.net/ubuntu/"
	I0317 10:51:58.820821  121672 command_runner.go:130] > PRIVACY_POLICY_URL="https://www.ubuntu.com/legal/terms-and-policies/privacy-policy"
	I0317 10:51:58.820826  121672 command_runner.go:130] > UBUNTU_CODENAME=jammy
	I0317 10:51:58.821186  121672 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0317 10:51:58.821240  121672 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0317 10:51:58.821276  121672 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0317 10:51:58.821304  121672 info.go:137] Remote host: Ubuntu 22.04.5 LTS
	I0317 10:51:58.821342  121672 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/addons for local assets ...
	I0317 10:51:58.821426  121672 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/files for local assets ...
	I0317 10:51:58.821549  121672 filesync.go:149] local asset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> 75722.pem in /etc/ssl/certs
	I0317 10:51:58.821587  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> /etc/ssl/certs/75722.pem
	I0317 10:51:58.821738  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0317 10:51:58.832440  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /etc/ssl/certs/75722.pem (1708 bytes)
	I0317 10:51:58.858006  121672 start.go:296] duration metric: took 149.265526ms for postStartSetup
	I0317 10:51:58.858440  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863-m02
	I0317 10:51:58.876029  121672 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/config.json ...
	I0317 10:51:58.876311  121672 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 10:51:58.876363  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:58.892557  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 10:51:58.979604  121672 command_runner.go:130] > 14%
	I0317 10:51:58.979681  121672 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0317 10:51:58.983864  121672 command_runner.go:130] > 168G
	I0317 10:51:58.984240  121672 start.go:128] duration metric: took 7.800887159s to createHost
	I0317 10:51:58.984260  121672 start.go:83] releasing machines lock for "multinode-286863-m02", held for 7.801016456s
	I0317 10:51:58.984340  121672 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863-m02
	I0317 10:51:59.005023  121672 out.go:177] * Found network options:
	I0317 10:51:59.008094  121672 out.go:177]   - NO_PROXY=192.168.67.2
	W0317 10:51:59.011109  121672 proxy.go:119] fail to check proxy env: Error ip not in block
	W0317 10:51:59.011162  121672 proxy.go:119] fail to check proxy env: Error ip not in block
	I0317 10:51:59.011237  121672 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0317 10:51:59.011287  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:59.011567  121672 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0317 10:51:59.011624  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 10:51:59.039946  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 10:51:59.040075  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 10:51:59.126908  121672 command_runner.go:130] >   File: /etc/cni/net.d/200-loopback.conf
	I0317 10:51:59.126938  121672 command_runner.go:130] >   Size: 54        	Blocks: 8          IO Block: 4096   regular file
	I0317 10:51:59.126945  121672 command_runner.go:130] > Device: d4h/212d	Inode: 1315290     Links: 1
	I0317 10:51:59.126952  121672 command_runner.go:130] > Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)
	I0317 10:51:59.126959  121672 command_runner.go:130] > Access: 2023-06-14 14:44:50.000000000 +0000
	I0317 10:51:59.126965  121672 command_runner.go:130] > Modify: 2023-06-14 14:44:50.000000000 +0000
	I0317 10:51:59.126977  121672 command_runner.go:130] > Change: 2025-03-17 10:25:40.255332969 +0000
	I0317 10:51:59.126985  121672 command_runner.go:130] >  Birth: 2025-03-17 10:25:40.255332969 +0000
	I0317 10:51:59.127358  121672 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0317 10:51:59.257509  121672 command_runner.go:130] > <a href="https://github.com/kubernetes/registry.k8s.io">Temporary Redirect</a>.
	I0317 10:51:59.260777  121672 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0317 10:51:59.260875  121672 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0317 10:51:59.293209  121672 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf, 
	I0317 10:51:59.293256  121672 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
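
Disabling the conflicting bridge/podman CNI configs is a rename rather than a delete, so they stay recoverable; the .mk_disabled suffix is the same one the earlier loopback find excludes. A sketch of the rename pass (needs root against the real /etc/cni/net.d):

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	dir := "/etc/cni/net.d"
	for _, pat := range []string{"*bridge*", "*podman*"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			panic(err)
		}
		for _, f := range matches {
			// Skip configs disabled on a previous run.
			if strings.HasSuffix(f, ".mk_disabled") {
				continue
			}
			if err := os.Rename(f, f+".mk_disabled"); err != nil {
				panic(err)
			}
			fmt.Println("disabled", f)
		}
	}
}
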
	I0317 10:51:59.293264  121672 start.go:495] detecting cgroup driver to use...
	I0317 10:51:59.293295  121672 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0317 10:51:59.293345  121672 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0317 10:51:59.305717  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0317 10:51:59.317227  121672 docker.go:217] disabling cri-docker service (if available) ...
	I0317 10:51:59.317324  121672 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0317 10:51:59.331884  121672 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0317 10:51:59.346783  121672 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0317 10:51:59.444721  121672 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0317 10:51:59.539929  121672 command_runner.go:130] ! Created symlink /etc/systemd/system/cri-docker.service → /dev/null.
	I0317 10:51:59.539962  121672 docker.go:233] disabling docker service ...
	I0317 10:51:59.540016  121672 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0317 10:51:59.561932  121672 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0317 10:51:59.575812  121672 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0317 10:51:59.682702  121672 command_runner.go:130] ! Removed /etc/systemd/system/sockets.target.wants/docker.socket.
	I0317 10:51:59.682798  121672 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0317 10:51:59.791277  121672 command_runner.go:130] ! Created symlink /etc/systemd/system/docker.service → /dev/null.
	I0317 10:51:59.791557  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0317 10:51:59.804978  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0317 10:51:59.824055  121672 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0317 10:51:59.826152  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0317 10:51:59.839001  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0317 10:51:59.849303  121672 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0317 10:51:59.849422  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0317 10:51:59.860661  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 10:51:59.871588  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0317 10:51:59.882122  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 10:51:59.893432  121672 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0317 10:51:59.903538  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0317 10:51:59.913791  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0317 10:51:59.924465  121672 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0317 10:51:59.935612  121672 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0317 10:51:59.943822  121672 command_runner.go:130] > net.bridge.bridge-nf-call-iptables = 1
	I0317 10:51:59.945043  121672 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0317 10:51:59.954068  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:52:00.082218  121672 ssh_runner.go:195] Run: sudo systemctl restart containerd
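
	[editor's note] The sed pipeline above switches containerd to the cgroupfs driver by rewriting `SystemdCgroup` in config.toml, then reloads systemd and restarts containerd. A rough Go equivalent of that one substitution (regexp and path mirror the log; this is a sketch, not minikube's actual code):

```go
package main

import (
	"log"
	"os"
	"regexp"
)

func main() {
	const path = "/etc/containerd/config.toml"

	data, err := os.ReadFile(path)
	if err != nil {
		log.Fatal(err)
	}

	// Mirrors: sed -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g'
	re := regexp.MustCompile(`(?m)^( *)SystemdCgroup = .*$`)
	out := re.ReplaceAll(data, []byte("${1}SystemdCgroup = false"))

	if err := os.WriteFile(path, out, 0o644); err != nil {
		log.Fatal(err)
	}
	// The log then runs `systemctl daemon-reload` and restarts containerd.
}
```
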
	I0317 10:52:00.381015  121672 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0317 10:52:00.381097  121672 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0317 10:52:00.389246  121672 command_runner.go:130] >   File: /run/containerd/containerd.sock
	I0317 10:52:00.389268  121672 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0317 10:52:00.389276  121672 command_runner.go:130] > Device: f1h/241d	Inode: 175         Links: 1
	I0317 10:52:00.389283  121672 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: (    0/    root)
	I0317 10:52:00.389290  121672 command_runner.go:130] > Access: 2025-03-17 10:52:00.301240515 +0000
	I0317 10:52:00.389304  121672 command_runner.go:130] > Modify: 2025-03-17 10:52:00.301240515 +0000
	I0317 10:52:00.389311  121672 command_runner.go:130] > Change: 2025-03-17 10:52:00.301240515 +0000
	I0317 10:52:00.389315  121672 command_runner.go:130] >  Birth: -
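
	[editor's note] The 60 s socket wait above is a simple stat poll: retry until /run/containerd/containerd.sock exists, then verify it with a full stat. A minimal sketch of the same check (the 250 ms interval is illustrative, not taken from minikube's source):

```go
package main

import (
	"fmt"
	"log"
	"os"
	"time"
)

// waitForSocket polls until path exists or the deadline passes.
func waitForSocket(path string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		if _, err := os.Stat(path); err == nil {
			return nil
		}
		time.Sleep(250 * time.Millisecond)
	}
	return fmt.Errorf("timed out waiting for %s", path)
}

func main() {
	if err := waitForSocket("/run/containerd/containerd.sock", 60*time.Second); err != nil {
		log.Fatal(err)
	}
	log.Println("containerd socket is up")
}
```
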
	I0317 10:52:00.391128  121672 start.go:563] Will wait 60s for crictl version
	I0317 10:52:00.391202  121672 ssh_runner.go:195] Run: which crictl
	I0317 10:52:00.395795  121672 command_runner.go:130] > /usr/bin/crictl
	I0317 10:52:00.395952  121672 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0317 10:52:00.442719  121672 command_runner.go:130] > Version:  0.1.0
	I0317 10:52:00.442788  121672 command_runner.go:130] > RuntimeName:  containerd
	I0317 10:52:00.442810  121672 command_runner.go:130] > RuntimeVersion:  1.7.25
	I0317 10:52:00.442851  121672 command_runner.go:130] > RuntimeApiVersion:  v1
	I0317 10:52:00.446259  121672 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.25
	RuntimeApiVersion:  v1
	I0317 10:52:00.446384  121672 ssh_runner.go:195] Run: containerd --version
	I0317 10:52:00.468434  121672 command_runner.go:130] > containerd containerd.io 1.7.25 bcc810d6b9066471b0b6fa75f557a15a1cbf31bb
	I0317 10:52:00.470366  121672 ssh_runner.go:195] Run: containerd --version
	I0317 10:52:00.493352  121672 command_runner.go:130] > containerd containerd.io 1.7.25 bcc810d6b9066471b0b6fa75f557a15a1cbf31bb
	I0317 10:52:00.501527  121672 out.go:177] * Preparing Kubernetes v1.32.2 on containerd 1.7.25 ...
	I0317 10:52:00.504530  121672 out.go:177]   - env NO_PROXY=192.168.67.2
	I0317 10:52:00.507494  121672 cli_runner.go:164] Run: docker network inspect multinode-286863 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 10:52:00.526262  121672 ssh_runner.go:195] Run: grep 192.168.67.1	host.minikube.internal$ /etc/hosts
	I0317 10:52:00.530397  121672 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.67.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
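
	[editor's note] The one-liner above is an idempotent hosts-file update: drop any existing line ending in a tab plus the name, append a fresh entry, copy the result back. The same idea in Go (IP, name, and path are taken from the log; a real version would stage through a temp file and sudo cp as the shell command does):

```go
package main

import (
	"log"
	"os"
	"strings"
)

// setHostsEntry removes any line ending in "\t<name>" and appends "ip\tname".
func setHostsEntry(path, ip, name string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var kept []string
	for _, line := range strings.Split(string(data), "\n") {
		if !strings.HasSuffix(line, "\t"+name) {
			kept = append(kept, line)
		}
	}
	out := strings.TrimRight(strings.Join(kept, "\n"), "\n") + "\n" + ip + "\t" + name + "\n"
	return os.WriteFile(path, []byte(out), 0o644)
}

func main() {
	if err := setHostsEntry("/etc/hosts", "192.168.67.1", "host.minikube.internal"); err != nil {
		log.Fatal(err)
	}
}
```
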
	I0317 10:52:00.542522  121672 mustload.go:65] Loading cluster: multinode-286863
	I0317 10:52:00.542736  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:52:00.543101  121672 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 10:52:00.561723  121672 host.go:66] Checking if "multinode-286863" exists ...
	I0317 10:52:00.562012  121672 certs.go:68] Setting up /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863 for IP: 192.168.67.3
	I0317 10:52:00.562028  121672 certs.go:194] generating shared ca certs ...
	I0317 10:52:00.562043  121672 certs.go:226] acquiring lock for ca certs: {Name:mk5a5307154bd473cdb748bc6e62d2139b42123a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 10:52:00.562157  121672 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key
	I0317 10:52:00.562202  121672 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key
	I0317 10:52:00.562219  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt -> /var/lib/minikube/certs/ca.crt
	I0317 10:52:00.562235  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key -> /var/lib/minikube/certs/ca.key
	I0317 10:52:00.562255  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0317 10:52:00.562269  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0317 10:52:00.562328  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem (1338 bytes)
	W0317 10:52:00.562361  121672 certs.go:480] ignoring /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572_empty.pem, impossibly tiny 0 bytes
	I0317 10:52:00.562375  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem (1675 bytes)
	I0317 10:52:00.562400  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem (1078 bytes)
	I0317 10:52:00.562429  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem (1123 bytes)
	I0317 10:52:00.562451  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem (1679 bytes)
	I0317 10:52:00.562498  121672 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem (1708 bytes)
	I0317 10:52:00.562528  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:52:00.562600  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem -> /usr/share/ca-certificates/7572.pem
	I0317 10:52:00.562622  121672 vm_assets.go:164] NewFileAsset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> /usr/share/ca-certificates/75722.pem
	I0317 10:52:00.562653  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0317 10:52:00.588484  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0317 10:52:00.614844  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0317 10:52:00.640119  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0317 10:52:00.667396  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0317 10:52:00.696041  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem --> /usr/share/ca-certificates/7572.pem (1338 bytes)
	I0317 10:52:00.721387  121672 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /usr/share/ca-certificates/75722.pem (1708 bytes)
	I0317 10:52:00.746428  121672 ssh_runner.go:195] Run: openssl version
	I0317 10:52:00.751782  121672 command_runner.go:130] > OpenSSL 3.0.2 15 Mar 2022 (Library: OpenSSL 3.0.2 15 Mar 2022)
	I0317 10:52:00.752260  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7572.pem && ln -fs /usr/share/ca-certificates/7572.pem /etc/ssl/certs/7572.pem"
	I0317 10:52:00.761663  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7572.pem
	I0317 10:52:00.765254  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Mar 17 10:33 /usr/share/ca-certificates/7572.pem
	I0317 10:52:00.765367  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Mar 17 10:33 /usr/share/ca-certificates/7572.pem
	I0317 10:52:00.765444  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7572.pem
	I0317 10:52:00.772128  121672 command_runner.go:130] > 51391683
	I0317 10:52:00.772564  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7572.pem /etc/ssl/certs/51391683.0"
	I0317 10:52:00.782017  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/75722.pem && ln -fs /usr/share/ca-certificates/75722.pem /etc/ssl/certs/75722.pem"
	I0317 10:52:00.792630  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/75722.pem
	I0317 10:52:00.796329  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Mar 17 10:33 /usr/share/ca-certificates/75722.pem
	I0317 10:52:00.796546  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Mar 17 10:33 /usr/share/ca-certificates/75722.pem
	I0317 10:52:00.796627  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/75722.pem
	I0317 10:52:00.803316  121672 command_runner.go:130] > 3ec20f2e
	I0317 10:52:00.803786  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/75722.pem /etc/ssl/certs/3ec20f2e.0"
	I0317 10:52:00.815100  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0317 10:52:00.827561  121672 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:52:00.831441  121672 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Mar 17 10:26 /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:52:00.831544  121672 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Mar 17 10:26 /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:52:00.831600  121672 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0317 10:52:00.837668  121672 command_runner.go:130] > b5213941
	I0317 10:52:00.838094  121672 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
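
	[editor's note] Each certificate above is linked into /etc/ssl/certs under its OpenSSL subject hash (the 51391683, 3ec20f2e, and b5213941 values) so TLS libraries can locate it by hash lookup. A sketch of one such step, shelling out to openssl exactly as the log does (the helper itself is illustrative):

```go
package main

import (
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
)

// linkBySubjectHash symlinks certPath as /etc/ssl/certs/<hash>.0.
func linkBySubjectHash(certPath string) error {
	// Mirrors: openssl x509 -hash -noout -in <cert>
	out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", certPath).Output()
	if err != nil {
		return err
	}
	hash := strings.TrimSpace(string(out))
	link := filepath.Join("/etc/ssl/certs", hash+".0")
	_ = os.Remove(link) // replace a stale link, like the `ln -fs` in the log
	return os.Symlink(certPath, link)
}

func main() {
	if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
		log.Fatal(err)
	}
}
```
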
	I0317 10:52:00.847180  121672 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0317 10:52:00.850051  121672 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0317 10:52:00.850343  121672 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0317 10:52:00.850401  121672 kubeadm.go:934] updating node {m02 192.168.67.3 8443 v1.32.2 containerd false true} ...
	I0317 10:52:00.850496  121672 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-286863-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=192.168.67.3
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0317 10:52:00.850561  121672 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0317 10:52:00.857954  121672 command_runner.go:130] > kubeadm
	I0317 10:52:00.858029  121672 command_runner.go:130] > kubectl
	I0317 10:52:00.858049  121672 command_runner.go:130] > kubelet
	I0317 10:52:00.858861  121672 binaries.go:44] Found k8s binaries, skipping transfer
	I0317 10:52:00.858982  121672 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system
	I0317 10:52:00.867508  121672 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (324 bytes)
	I0317 10:52:00.885014  121672 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0317 10:52:00.903459  121672 ssh_runner.go:195] Run: grep 192.168.67.2	control-plane.minikube.internal$ /etc/hosts
	I0317 10:52:00.907406  121672 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.67.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0317 10:52:00.917602  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:52:01.006441  121672 ssh_runner.go:195] Run: sudo systemctl start kubelet
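
	[editor's note] The kubelet drop-in logged above is generated per node: only --hostname-override and --node-ip differ between the control plane and m02. A hedged sketch of rendering that ExecStart line with text/template (the struct and field names here are illustrative, not minikube's actual types):

```go
package main

import (
	"log"
	"os"
	"text/template"
)

// nodeFlags holds the per-node values substituted into the unit file.
// These names are illustrative; minikube's own config types differ.
type nodeFlags struct {
	Version, Hostname, NodeIP string
}

const unit = `[Service]
ExecStart=
ExecStart=/var/lib/minikube/binaries/{{.Version}}/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override={{.Hostname}} --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip={{.NodeIP}}
`

func main() {
	t := template.Must(template.New("kubelet").Parse(unit))
	// Values match the m02 node from the log.
	if err := t.Execute(os.Stdout, nodeFlags{"v1.32.2", "multinode-286863-m02", "192.168.67.3"}); err != nil {
		log.Fatal(err)
	}
}
```
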
	I0317 10:52:01.027991  121672 host.go:66] Checking if "multinode-286863" exists ...
	I0317 10:52:01.028268  121672 start.go:317] joinCluster: &{Name:multinode-286863 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-286863 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.67.2 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true} {Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:52:01.028364  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0317 10:52:01.028419  121672 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 10:52:01.048331  121672 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 10:52:01.215528  121672 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token e4c4cj.to2q92temqv4uz3y --discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 
	I0317 10:52:01.223855  121672 start.go:343] trying to join worker node "m02" to cluster: &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0317 10:52:01.223898  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token e4c4cj.to2q92temqv4uz3y --discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-286863-m02"
	I0317 10:52:01.265987  121672 command_runner.go:130] > [preflight] Running pre-flight checks
	I0317 10:52:01.276994  121672 command_runner.go:130] > [preflight] The system verification failed. Printing the output from the verification:
	I0317 10:52:01.277019  121672 command_runner.go:130] > KERNEL_VERSION: 5.15.0-1077-aws
	I0317 10:52:01.277024  121672 command_runner.go:130] > OS: Linux
	I0317 10:52:01.277030  121672 command_runner.go:130] > CGROUPS_CPU: enabled
	I0317 10:52:01.277038  121672 command_runner.go:130] > CGROUPS_CPUACCT: enabled
	I0317 10:52:01.277044  121672 command_runner.go:130] > CGROUPS_CPUSET: enabled
	I0317 10:52:01.277049  121672 command_runner.go:130] > CGROUPS_DEVICES: enabled
	I0317 10:52:01.277054  121672 command_runner.go:130] > CGROUPS_FREEZER: enabled
	I0317 10:52:01.277059  121672 command_runner.go:130] > CGROUPS_MEMORY: enabled
	I0317 10:52:01.277066  121672 command_runner.go:130] > CGROUPS_PIDS: enabled
	I0317 10:52:01.277071  121672 command_runner.go:130] > CGROUPS_HUGETLB: enabled
	I0317 10:52:01.277075  121672 command_runner.go:130] > CGROUPS_BLKIO: enabled
	I0317 10:52:01.370626  121672 command_runner.go:130] > [preflight] Reading configuration from the "kubeadm-config" ConfigMap in namespace "kube-system"...
	I0317 10:52:01.370653  121672 command_runner.go:130] > [preflight] Use 'kubeadm init phase upload-config --config your-config.yaml' to re-upload it.
	I0317 10:52:01.412839  121672 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0317 10:52:01.413046  121672 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0317 10:52:01.413066  121672 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0317 10:52:01.540506  121672 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0317 10:52:03.041712  121672 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501292619s
	I0317 10:52:03.041739  121672 command_runner.go:130] > [kubelet-start] Waiting for the kubelet to perform the TLS Bootstrap
	I0317 10:52:03.064194  121672 command_runner.go:130] > This node has joined the cluster:
	I0317 10:52:03.064274  121672 command_runner.go:130] > * Certificate signing request was sent to apiserver and a response was received.
	I0317 10:52:03.064310  121672 command_runner.go:130] > * The Kubelet was informed of the new secure connection details.
	I0317 10:52:03.064339  121672 command_runner.go:130] > Run 'kubectl get nodes' on the control-plane to see this node join the cluster.
	I0317 10:52:03.068047  121672 command_runner.go:130] ! 	[WARNING SystemVerification]: cgroups v1 support is in maintenance mode, please migrate to cgroups v2
	I0317 10:52:03.068078  121672 command_runner.go:130] ! 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1077-aws\n", err: exit status 1
	I0317 10:52:03.068093  121672 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0317 10:52:03.068108  121672 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token e4c4cj.to2q92temqv4uz3y --discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 --ignore-preflight-errors=all --cri-socket unix:///run/containerd/containerd.sock --node-name=multinode-286863-m02": (1.844196566s)
	I0317 10:52:03.068125  121672 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0317 10:52:03.264507  121672 command_runner.go:130] ! Created symlink /etc/systemd/system/multi-user.target.wants/kubelet.service → /lib/systemd/system/kubelet.service.
	I0317 10:52:03.264595  121672 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-286863-m02 minikube.k8s.io/updated_at=2025_03_17T10_52_03_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76 minikube.k8s.io/name=multinode-286863 minikube.k8s.io/primary=false
	I0317 10:52:03.365460  121672 command_runner.go:130] > node/multinode-286863-m02 labeled
	I0317 10:52:03.370999  121672 start.go:319] duration metric: took 2.342726955s to joinCluster
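
	[editor's note] The join above is two commands: `kubeadm token create --print-join-command` on the control plane emits the exact join line seen in the log, and the worker runs it with --ignore-preflight-errors=all plus the CRI socket and node name appended. A rough local-only sketch of that flow (minikube runs both ends over SSH; this helper just shells out on one host):

```go
package main

import (
	"log"
	"os/exec"
	"strings"
)

func main() {
	// Step 1 (control plane): emit a ready-made join command.
	out, err := exec.Command("kubeadm", "token", "create", "--print-join-command", "--ttl=0").Output()
	if err != nil {
		log.Fatal(err)
	}
	join := strings.Fields(strings.TrimSpace(string(out)))

	// Step 2 (worker): run it with the extra flags from the log.
	join = append(join,
		"--ignore-preflight-errors=all",
		"--cri-socket", "unix:///run/containerd/containerd.sock",
		"--node-name=multinode-286863-m02")
	cmd := exec.Command(join[0], join[1:]...)
	if b, err := cmd.CombinedOutput(); err != nil {
		log.Fatalf("join failed: %v\n%s", err, b)
	}
}
```
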
	I0317 10:52:03.371120  121672 start.go:235] Will wait 6m0s for node &{Name:m02 IP:192.168.67.3 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:false Worker:true}
	I0317 10:52:03.371388  121672 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:52:03.378125  121672 out.go:177] * Verifying Kubernetes components...
	I0317 10:52:03.381039  121672 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 10:52:03.478279  121672 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0317 10:52:03.492225  121672 loader.go:402] Config loaded from file:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:52:03.492434  121672 kapi.go:59] client config for multinode-286863: &rest.Config{Host:"https://192.168.67.2:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.crt", KeyFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/profiles/multinode-286863/client.key", CAFile:"/home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x1e2c050), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0317 10:52:03.492712  121672 node_ready.go:35] waiting up to 6m0s for node "multinode-286863-m02" to be "Ready" ...
	I0317 10:52:03.492768  121672 type.go:168] "Request Body" body=""
	I0317 10:52:03.492810  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:03.492820  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:03.492828  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:03.492832  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:03.495479  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:03.495500  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:03.495507  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:03 GMT
	I0317 10:52:03.495512  121672 round_trippers.go:587]     Audit-Id: d72d24b5-8d87-47d4-8a75-ff0853b1d39f
	I0317 10:52:03.495517  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:03.495519  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:03.495524  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:03.495527  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:03.495531  121672 round_trippers.go:587]     Content-Length: 4066
	I0317 10:52:03.495703  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 cb 1f 0a a2 0c 0a 14  6d 75 6c 74 69 6e 6f 64  |........multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 31 38 00 42  |cee83afe2.4918.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 19107 chars]
	 >
	I0317 10:52:03.993511  121672 type.go:168] "Request Body" body=""
	I0317 10:52:03.993577  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:03.993583  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:03.993597  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:03.993613  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:03.996078  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:03.996102  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:03.996111  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:03.996115  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:03.996118  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:03.996121  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:03.996124  121672 round_trippers.go:587]     Content-Length: 4066
	I0317 10:52:03.996127  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:03 GMT
	I0317 10:52:03.996129  121672 round_trippers.go:587]     Audit-Id: 6e0eb6b8-e788-494b-8bfd-eaa0b2376544
	I0317 10:52:03.996277  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 cb 1f 0a a2 0c 0a 14  6d 75 6c 74 69 6e 6f 64  |........multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 31 38 00 42  |cee83afe2.4918.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 19107 chars]
	 >
	I0317 10:52:04.492950  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.493025  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:04.493036  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.493045  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.493053  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.495353  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.495378  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.495387  121672 round_trippers.go:587]     Audit-Id: ed495acf-6334-40c7-a6c0-cb7f7fb48820
	I0317 10:52:04.495391  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.495395  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.495398  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.495401  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.495407  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.495585  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 22 0a a7 0f 0a 14  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 35 38 00 42  |cee83afe2.4958.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 20770 chars]
	 >
	I0317 10:52:04.495689  121672 node_ready.go:49] node "multinode-286863-m02" has status "Ready":"True"
	I0317 10:52:04.495703  121672 node_ready.go:38] duration metric: took 1.00297158s for node "multinode-286863-m02" to be "Ready" ...
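
	[editor's note] The repeated GETs against /api/v1/nodes/multinode-286863-m02 above are a readiness poll: fetch the Node, inspect its Ready condition, retry roughly every 500 ms until it is True or the budget runs out. A condensed client-go sketch of that loop (the kubeconfig path is the one from the log; the 500 ms cadence and 6 m budget mirror the log, but the helper is not minikube's actual code):

```go
package main

import (
	"context"
	"log"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/20535-2262/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)

	// Poll the node until its Ready condition is True, up to 6 minutes.
	err = wait.PollUntilContextTimeout(context.Background(), 500*time.Millisecond, 6*time.Minute, true,
		func(ctx context.Context) (bool, error) {
			node, err := cs.CoreV1().Nodes().Get(ctx, "multinode-286863-m02", metav1.GetOptions{})
			if err != nil {
				return false, nil // treat errors as transient and keep polling
			}
			for _, c := range node.Status.Conditions {
				if c.Type == corev1.NodeReady {
					return c.Status == corev1.ConditionTrue, nil
				}
			}
			return false, nil
		})
	if err != nil {
		log.Fatal(err)
	}
	log.Println(`node "multinode-286863-m02" is Ready`)
}
```
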
	I0317 10:52:04.495713  121672 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0317 10:52:04.495758  121672 type.go:204] "Request Body" body=""
	I0317 10:52:04.495794  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods
	I0317 10:52:04.495802  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.495809  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.495813  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.498145  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.498166  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.498174  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.498178  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.498182  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.498185  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.498192  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.498195  121672 round_trippers.go:587]     Audit-Id: 6b7b9fa6-796c-4865-b57e-618ec4805ca7
	I0317 10:52:04.499617  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 e3 83 03 0a  09 0a 00 12 03 34 39 35  |ist..........495|
		00000020  1a 00 12 c9 25 0a b7 17  0a 18 63 6f 72 65 64 6e  |....%.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 62 36 6d  |s-668d6bf9bc-b6m|
		00000040  68 39 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |h9..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  63 30 64 66 63 30 37 62  |stem".*$c0dfc07b|
		00000070  2d 63 30 36 64 2d 34 38  37 34 2d 38 62 61 34 2d  |-c06d-4874-8ba4-|
		00000080  65 38 30 33 33 62 30 39  37 38 38 66 32 03 34 34  |e8033b09788f2.44|
		00000090  38 38 00 42 08 08 b4 fd  df be 06 10 00 5a 13 0a  |88.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 244190 chars]
	 >
	I0317 10:52:04.499978  121672 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.500033  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.500075  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-b6mh9
	I0317 10:52:04.500086  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.500094  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.500099  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.502085  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:52:04.502105  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.502114  121672 round_trippers.go:587]     Audit-Id: f9b57526-9bc8-45ae-8eff-abc9b86750e0
	I0317 10:52:04.502117  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.502136  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.502144  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.502147  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.502150  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.502388  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  c9 25 0a b7 17 0a 18 63  6f 72 65 64 6e 73 2d 36  |.%.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 62 36 6d 68 39 12  |68d6bf9bc-b6mh9.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 63 30 64  66 63 30 37 62 2d 63 30  |m".*$c0dfc07b-c0|
		00000060  36 64 2d 34 38 37 34 2d  38 62 61 34 2d 65 38 30  |6d-4874-8ba4-e80|
		00000070  33 33 62 30 39 37 38 38  66 32 03 34 34 38 38 00  |33b09788f2.4488.|
		00000080  42 08 08 b4 fd df be 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 22833 chars]
	 >
	I0317 10:52:04.502518  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.502557  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.502568  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.502575  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.502580  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.504453  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:52:04.504475  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.504483  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.504486  121672 round_trippers.go:587]     Audit-Id: 148668c2-fad2-432d-9403-bde6918eda9c
	I0317 10:52:04.504490  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.504493  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.504497  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.504500  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.504704  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:04.504798  121672 pod_ready.go:93] pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:04.504815  121672 pod_ready.go:82] duration metric: took 4.813512ms for pod "coredns-668d6bf9bc-b6mh9" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.504825  121672 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.504863  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.504897  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-286863
	I0317 10:52:04.504907  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.504914  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.504918  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.506566  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:52:04.506587  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.506595  121672 round_trippers.go:587]     Audit-Id: c98bd2ef-1808-4118-8d6f-a3619a21b483
	I0317 10:52:04.506599  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.506602  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.506605  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.506607  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.506610  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.506843  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  87 2b 0a 9a 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 32 38  36 38 36 33 12 00 1a 0b  |inode-286863....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 31  |kube-system".*$1|
		00000040  63 64 66 38 33 30 63 2d  30 64 34 31 2d 34 63 65  |cdf830c-0d41-4ce|
		00000050  37 2d 39 66 62 62 2d 31  35 37 34 66 62 30 65 38  |7-9fbb-1574fb0e8|
		00000060  33 64 66 32 03 34 32 38  38 00 42 08 08 af fd df  |3df2.4288.B.....|
		00000070  be 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4d  |.control-planebM|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26307 chars]
	 >
	I0317 10:52:04.506975  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.507013  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.507018  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.507029  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.507038  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.508678  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:52:04.508696  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.508703  121672 round_trippers.go:587]     Audit-Id: 61df17c0-a922-4ced-8bb4-ff91417bc983
	I0317 10:52:04.508708  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.508712  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.508716  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.508719  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.508722  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.509053  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:04.509196  121672 pod_ready.go:93] pod "etcd-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:04.509214  121672 pod_ready.go:82] duration metric: took 4.377808ms for pod "etcd-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.509234  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.509270  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.509306  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-286863
	I0317 10:52:04.509315  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.509322  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.509327  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.511119  121672 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0317 10:52:04.511160  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.511190  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.511207  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.511243  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.511272  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.511284  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.511288  121672 round_trippers.go:587]     Audit-Id: 5b503d26-2c43-4da3-8ac2-c1ecd3eccdd7
	I0317 10:52:04.511554  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  bd 39 0a dc 1f 0a 1f 6b  75 62 65 2d 61 70 69 73  |.9.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  32 38 36 38 36 33 12 00  1a 0b 6b 75 62 65 2d 73  |286863....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 62 34 35 37 35 33 33  |ystem".*$b457533|
		00000050  37 2d 37 39 31 37 2d 34  66 32 33 2d 62 31 65 32  |7-7917-4f23-b1e2|
		00000060  2d 63 36 33 31 37 33 39  31 34 35 62 65 32 03 34  |-c631739145be2.4|
		00000070  33 31 38 00 42 08 08 af  fd df be 06 10 00 5a 1b  |318.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 54 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebT.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 35461 chars]
	 >
	I0317 10:52:04.511673  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.511711  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.511722  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.511729  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.511733  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.513775  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.513793  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.513801  121672 round_trippers.go:587]     Audit-Id: 46873880-37cd-4903-911a-2d7850101900
	I0317 10:52:04.513804  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.513807  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.513810  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.513814  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.513817  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.514044  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:04.514160  121672 pod_ready.go:93] pod "kube-apiserver-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:04.514174  121672 pod_ready.go:82] duration metric: took 4.928095ms for pod "kube-apiserver-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.514185  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.514223  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.514268  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-286863
	I0317 10:52:04.514281  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.514288  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.514293  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.516534  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.516561  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.516570  121672 round_trippers.go:587]     Audit-Id: 7e50080f-0e38-433a-97c8-0f908a0166cd
	I0317 10:52:04.516574  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.516577  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.516580  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.516583  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.516589  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.516805  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a7 36 0a ca 20 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.6.. .(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 32 38 36 38 36 33 12  |ultinode-286863.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 33 62 38 39 66 36  39 36 2d 32 33 66 32 2d  |*$3b89f696-23f2-|
		00000060  34 64 34 35 2d 39 65 30  38 2d 65 33 32 30 32 38  |4d45-9e08-e32028|
		00000070  63 37 39 32 30 65 32 03  33 38 33 38 00 42 08 08  |c7920e2.3838.B..|
		00000080  af fd df be 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 33417 chars]
	 >
	I0317 10:52:04.516934  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.516977  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.516987  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.516994  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.516999  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.519268  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.519292  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.519301  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.519306  121672 round_trippers.go:587]     Audit-Id: c6f26908-71df-4111-ae86-73c21148c578
	I0317 10:52:04.519310  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.519313  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.519316  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.519319  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.519501  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:04.519599  121672 pod_ready.go:93] pod "kube-controller-manager-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:04.519619  121672 pod_ready.go:82] duration metric: took 5.425352ms for pod "kube-controller-manager-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.519634  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-9xbpl" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.519684  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.694051  121672 request.go:661] Waited for 174.294368ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-9xbpl
	I0317 10:52:04.694108  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-9xbpl
	I0317 10:52:04.694115  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.694145  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.694158  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.699328  121672 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0317 10:52:04.699350  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.699359  121672 round_trippers.go:587]     Audit-Id: 6164b41a-a0a5-4a46-bb7c-5c49aceb52a2
	I0317 10:52:04.699363  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.699367  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.699372  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.699377  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.699380  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.699570  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  8a 25 0a be 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 39 78 62 70 6c 12  0b 6b 75 62 65 2d 70 72  |y-9xbpl..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 66 61  32 35 36 33 63 2d 30 36  |m".*$afa2563c-06|
		00000050  37 62 2d 34 65 31 32 2d  62 38 66 34 2d 38 64 62  |7b-4e12-b8f4-8db|
		00000060  37 33 38 61 38 38 30 63  63 32 03 33 38 34 38 00  |738a880cc2.3848.|
		00000070  42 08 08 b4 fd df be 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22518 chars]
	 >
	I0317 10:52:04.699708  121672 type.go:168] "Request Body" body=""
	I0317 10:52:04.894092  121672 request.go:661] Waited for 194.345739ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.894160  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:04.894166  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:04.894228  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:04.894240  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:04.896775  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:04.896794  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:04.896801  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:04.896805  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:04.896810  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:04 GMT
	I0317 10:52:04.896813  121672 round_trippers.go:587]     Audit-Id: 2f53b3c4-0082-4305-b077-240d759b16f1
	I0317 10:52:04.896816  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:04.896818  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:04.897604  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:04.897762  121672 pod_ready.go:93] pod "kube-proxy-9xbpl" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:04.897802  121672 pod_ready.go:82] duration metric: took 378.131026ms for pod "kube-proxy-9xbpl" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.897819  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-mg9hg" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:04.897889  121672 type.go:168] "Request Body" body=""
	I0317 10:52:05.093206  121672 request.go:661] Waited for 195.265248ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-mg9hg
	I0317 10:52:05.093302  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-mg9hg
	I0317 10:52:05.093363  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:05.093373  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:05.093378  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:05.096272  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:05.096348  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:05.096370  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:05.096391  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:05 GMT
	I0317 10:52:05.096429  121672 round_trippers.go:587]     Audit-Id: 50ca255e-6222-4fd4-ba15-aa8ce3518e1a
	I0317 10:52:05.096452  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:05.096474  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:05.096509  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:05.097207  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9a 25 0a eb 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 6d 67 39 68 67 12  0b 6b 75 62 65 2d 70 72  |y-mg9hg..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 63 33 65  33 63 32 37 34 2d 36 36  |m".*$c3e3c274-66|
		00000050  61 65 2d 34 31 62 32 2d  62 32 36 34 2d 33 63 36  |ae-41b2-b264-3c6|
		00000060  33 65 33 38 33 31 36 35  31 32 03 34 39 36 38 00  |3e38316512.4968.|
		00000070  42 08 08 d2 fd df be 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22597 chars]
	 >
	I0317 10:52:05.097410  121672 type.go:168] "Request Body" body=""
	I0317 10:52:05.293570  121672 request.go:661] Waited for 196.097217ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:05.293680  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:05.293744  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:05.293770  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:05.293787  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:05.296047  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:05.296116  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:05.296139  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:05.296161  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:05.296202  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:05 GMT
	I0317 10:52:05.296221  121672 round_trippers.go:587]     Audit-Id: a2717106-1506-4970-84f1-9e68ce5ee120
	I0317 10:52:05.296241  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:05.296277  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:05.296910  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 22 0a a7 0f 0a 14  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 35 38 00 42  |cee83afe2.4958.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 20770 chars]
	 >
	I0317 10:52:05.398378  121672 type.go:168] "Request Body" body=""
	I0317 10:52:05.493623  121672 request.go:661] Waited for 95.186334ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-mg9hg
	I0317 10:52:05.493698  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-mg9hg
	I0317 10:52:05.493706  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:05.493731  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:05.493742  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:05.496164  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:05.496233  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:05.496257  121672 round_trippers.go:587]     Audit-Id: 25031876-a864-4539-9752-6c89857801ca
	I0317 10:52:05.496278  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:05.496316  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:05.496333  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:05.496354  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:05.496375  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:05 GMT
	I0317 10:52:05.497019  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9a 25 0a eb 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 6d 67 39 68 67 12  0b 6b 75 62 65 2d 70 72  |y-mg9hg..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 63 33 65  33 63 32 37 34 2d 36 36  |m".*$c3e3c274-66|
		00000050  61 65 2d 34 31 62 32 2d  62 32 36 34 2d 33 63 36  |ae-41b2-b264-3c6|
		00000060  33 65 33 38 33 31 36 35  31 32 03 34 39 36 38 00  |3e38316512.4968.|
		00000070  42 08 08 d2 fd df be 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22597 chars]
	 >
	I0317 10:52:05.497221  121672 type.go:168] "Request Body" body=""
	I0317 10:52:05.693580  121672 request.go:661] Waited for 196.288879ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:05.693694  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:05.693709  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:05.693719  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:05.693723  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:05.695754  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:05.695831  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:05.695849  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:05.695855  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:05.695858  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:05.695861  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:05.695864  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:05 GMT
	I0317 10:52:05.695867  121672 round_trippers.go:587]     Audit-Id: 732386db-6bd3-4d94-9362-5eb0c83cb703
	I0317 10:52:05.696046  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 22 0a a7 0f 0a 14  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 35 38 00 42  |cee83afe2.4958.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 20770 chars]
	 >
	I0317 10:52:05.898682  121672 type.go:168] "Request Body" body=""
	I0317 10:52:05.898753  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-proxy-mg9hg
	I0317 10:52:05.898760  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:05.898768  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:05.898773  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:05.901122  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:05.901148  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:05.901157  121672 round_trippers.go:587]     Audit-Id: 7c3f012a-48ef-458e-ba35-b00cb082da43
	I0317 10:52:05.901163  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:05.901168  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:05.901172  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:05.901175  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:05.901180  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:05 GMT
	I0317 10:52:05.901742  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  92 25 0a be 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 6d 67 39 68 67 12  0b 6b 75 62 65 2d 70 72  |y-mg9hg..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 63 33 65  33 63 32 37 34 2d 36 36  |m".*$c3e3c274-66|
		00000050  61 65 2d 34 31 62 32 2d  62 32 36 34 2d 33 63 36  |ae-41b2-b264-3c6|
		00000060  33 65 33 38 33 31 36 35  31 32 03 35 30 34 38 00  |3e38316512.5048.|
		00000070  42 08 08 d2 fd df be 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22589 chars]
	 >
	I0317 10:52:05.901885  121672 type.go:168] "Request Body" body=""
	I0317 10:52:06.093157  121672 request.go:661] Waited for 191.224469ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:06.093263  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863-m02
	I0317 10:52:06.093275  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:06.093284  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:06.093289  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:06.095661  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:06.095689  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:06.095706  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:06.095711  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:06 GMT
	I0317 10:52:06.095715  121672 round_trippers.go:587]     Audit-Id: afa3bbaf-e720-406f-aea0-a01c6d328db5
	I0317 10:52:06.095720  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:06.095724  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:06.095728  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:06.095963  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 22 0a a7 0f 0a 14  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 32 38 36 38 36 33  2d 6d 30 32 12 00 1a 00  |e-286863-m02....|
		00000030  22 00 2a 24 61 39 39 38  64 30 39 61 2d 37 61 38  |".*$a998d09a-7a8|
		00000040  63 2d 34 35 35 32 2d 38  33 37 33 2d 63 36 35 35  |c-4552-8373-c655|
		00000050  63 65 65 38 33 61 66 65  32 03 34 39 35 38 00 42  |cee83afe2.4958.B|
		00000060  08 08 d2 fd df be 06 10  00 5a 20 0a 17 62 65 74  |.........Z ..bet|
		00000070  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		00000080  61 72 63 68 12 05 61 72  6d 36 34 5a 1e 0a 15 62  |arch..arm64Z...b|
		00000090  65 74 61 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |eta.kubernetes.i|
		000000a0  6f 2f 6f 73 12 05 6c 69  6e 75 78 5a 1b 0a 12 6b  |o/os..linuxZ...k|
		000000b0  75 62 65 72 6e 65 74 65  73 2e 69 6f 2f 61 72 63  |ubernetes.io/arc|
		000000c0  68 12 05 61 72 6d 36 34  5a 2e 0a 16 6b 75 62 65  |h..arm64Z...kub [truncated 20770 chars]
	 >
	I0317 10:52:06.096097  121672 pod_ready.go:93] pod "kube-proxy-mg9hg" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:06.096114  121672 pod_ready.go:82] duration metric: took 1.198287415s for pod "kube-proxy-mg9hg" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:06.096127  121672 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:06.096179  121672 type.go:168] "Request Body" body=""
	I0317 10:52:06.293614  121672 request.go:661] Waited for 197.370351ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-286863
	I0317 10:52:06.293706  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-286863
	I0317 10:52:06.293722  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:06.293737  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:06.293745  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:06.296095  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:06.296118  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:06.296128  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:06.296132  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:06.296135  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:06 GMT
	I0317 10:52:06.296139  121672 round_trippers.go:587]     Audit-Id: 7c8d7faa-10a8-4711-834b-98ddf29c4446
	I0317 10:52:06.296143  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:06.296145  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:06.296414  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  e3 22 0a 80 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  32 38 36 38 36 33 12 00  1a 0b 6b 75 62 65 2d 73  |286863....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 32 34 34 35 30 65 65  |ystem".*$24450ee|
		00000050  66 2d 61 66 65 66 2d 34  34 66 37 2d 61 37 32 39  |f-afef-44f7-a729|
		00000060  2d 32 31 39 30 63 34 64  35 65 34 35 34 32 03 34  |-2190c4d5e4542.4|
		00000070  33 34 38 00 42 08 08 af  fd df be 06 10 00 5a 1b  |348.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21089 chars]
	 >
	I0317 10:52:06.296588  121672 type.go:168] "Request Body" body=""
	I0317 10:52:06.493906  121672 request.go:661] Waited for 197.255226ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:06.493962  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes/multinode-286863
	I0317 10:52:06.493967  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:06.493976  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:06.493980  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:06.496280  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:06.496338  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:06.496370  121672 round_trippers.go:587]     Audit-Id: b72e940a-81da-47aa-b93f-9ae5a70908d0
	I0317 10:52:06.496388  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:06.496400  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:06.496406  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:06.496410  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:06.496413  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:06 GMT
	I0317 10:52:06.496587  121672 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 9f 25 0a 90 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..%.....multinod|
		00000020  65 2d 32 38 36 38 36 33  12 00 1a 00 22 00 2a 24  |e-286863....".*$|
		00000030  65 39 39 33 61 62 63 64  2d 35 37 35 33 2d 34 62  |e993abcd-5753-4b|
		00000040  62 64 2d 38 61 30 35 2d  35 38 38 36 37 61 30 61  |bd-8a05-58867a0a|
		00000050  35 66 35 38 32 03 34 36  30 38 00 42 08 08 ac fd  |5f582.4608.B....|
		00000060  df be 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 72 6d 36 34 5a  1e 0a 15 62 65 74 61 2e  |..arm64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  72 6d 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |rm64Z*..kuberne [truncated 22666 chars]
	 >
	I0317 10:52:06.496691  121672 pod_ready.go:93] pod "kube-scheduler-multinode-286863" in "kube-system" namespace has status "Ready":"True"
	I0317 10:52:06.496715  121672 pod_ready.go:82] duration metric: took 400.574181ms for pod "kube-scheduler-multinode-286863" in "kube-system" namespace to be "Ready" ...
	I0317 10:52:06.496731  121672 pod_ready.go:39] duration metric: took 2.001002559s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0317 10:52:06.496747  121672 system_svc.go:44] waiting for kubelet service to be running ....
	I0317 10:52:06.496809  121672 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 10:52:06.508575  121672 system_svc.go:56] duration metric: took 11.819269ms WaitForService to wait for kubelet
	I0317 10:52:06.508602  121672 kubeadm.go:582] duration metric: took 3.13741984s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0317 10:52:06.508621  121672 node_conditions.go:102] verifying NodePressure condition ...
	I0317 10:52:06.508674  121672 type.go:204] "Request Body" body=""
	I0317 10:52:06.694035  121672 request.go:661] Waited for 185.322088ms due to client-side throttling, not priority and fairness, request: GET:https://192.168.67.2:8443/api/v1/nodes
	I0317 10:52:06.694103  121672 round_trippers.go:470] GET https://192.168.67.2:8443/api/v1/nodes
	I0317 10:52:06.694110  121672 round_trippers.go:476] Request Headers:
	I0317 10:52:06.694116  121672 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0317 10:52:06.694125  121672 round_trippers.go:480]     User-Agent: minikube-linux-arm64/v0.0.0 (linux/arm64) kubernetes/$Format
	I0317 10:52:06.696638  121672 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0317 10:52:06.696677  121672 round_trippers.go:584] Response Headers:
	I0317 10:52:06.696686  121672 round_trippers.go:587]     Audit-Id: b46abed6-65ea-4093-bda8-017478dc0305
	I0317 10:52:06.696690  121672 round_trippers.go:587]     Cache-Control: no-cache, private
	I0317 10:52:06.696694  121672 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0317 10:52:06.696697  121672 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: cc280369-a3ef-40e9-8283-5f0226c7c2a1
	I0317 10:52:06.696717  121672 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: ea04bf6c-d542-4a5c-a7c9-4154867dc08e
	I0317 10:52:06.696728  121672 round_trippers.go:587]     Date: Mon, 17 Mar 2025 10:52:06 GMT
	I0317 10:52:06.697012  121672 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 cf 47 0a  09 0a 00 12 03 35 31 33  |List..G......513|
		00000020  1a 00 12 9f 25 0a 90 11  0a 10 6d 75 6c 74 69 6e  |....%.....multin|
		00000030  6f 64 65 2d 32 38 36 38  36 33 12 00 1a 00 22 00  |ode-286863....".|
		00000040  2a 24 65 39 39 33 61 62  63 64 2d 35 37 35 33 2d  |*$e993abcd-5753-|
		00000050  34 62 62 64 2d 38 61 30  35 2d 35 38 38 36 37 61  |4bbd-8a05-58867a|
		00000060  30 61 35 66 35 38 32 03  34 36 30 38 00 42 08 08  |0a5f582.4608.B..|
		00000070  ac fd df be 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 72 6d 36  34 5a 1e 0a 15 62 65 74  |ch..arm64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 44395 chars]
	 >
	I0317 10:52:06.697167  121672 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0317 10:52:06.697189  121672 node_conditions.go:123] node cpu capacity is 2
	I0317 10:52:06.697201  121672 node_conditions.go:122] node storage ephemeral capacity is 203034800Ki
	I0317 10:52:06.697206  121672 node_conditions.go:123] node cpu capacity is 2
	I0317 10:52:06.697211  121672 node_conditions.go:105] duration metric: took 188.584916ms to run NodePressure ...
	I0317 10:52:06.697227  121672 start.go:241] waiting for startup goroutines ...
	I0317 10:52:06.697267  121672 start.go:255] writing updated cluster config ...
	I0317 10:52:06.697565  121672 ssh_runner.go:195] Run: rm -f paused
	I0317 10:52:06.754544  121672 start.go:600] kubectl: 1.32.3, cluster: 1.32.2 (minor skew: 0)
	I0317 10:52:06.759443  121672 out.go:177] * Done! kubectl is now configured to use "multinode-286863" cluster and "default" namespace by default
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	4fcd4500ed69e       89a35e2ebb6b9       11 minutes ago      Running             busybox                   0                   c365cec361f11       busybox-58667487b6-6q5tk
	ff31fdfd22a18       2f6c962e7b831       11 minutes ago      Running             coredns                   0                   cb5e57c9bf16e       coredns-668d6bf9bc-b6mh9
	0345da9c7e263       ee75e27fff91c       12 minutes ago      Running             kindnet-cni               0                   8378731501b29       kindnet-krz7g
	a720772642c00       ba04bb24b9575       12 minutes ago      Running             storage-provisioner       0                   bb6783e71d4fd       storage-provisioner
	cd66844fb4723       e5aac5df76d9b       12 minutes ago      Running             kube-proxy                0                   e27e703146063       kube-proxy-9xbpl
	8ca23ed56ddd0       3c9285acfd2ff       12 minutes ago      Running             kube-controller-manager   0                   e1e5eb7aa0b7d       kube-controller-manager-multinode-286863
	0224d523c6c15       6417e1437b6d9       12 minutes ago      Running             kube-apiserver            0                   2cbbc085cef9c       kube-apiserver-multinode-286863
	e85f28ee94c94       7fc9d4aa817aa       12 minutes ago      Running             etcd                      0                   a6767976935c5       etcd-multinode-286863
	10345579e4f7c       82dfa03f692fb       12 minutes ago      Running             kube-scheduler            0                   3b3e6995acdd8       kube-scheduler-multinode-286863
	
	
	==> containerd <==
	Mar 17 10:51:34 multinode-286863 containerd[841]: time="2025-03-17T10:51:34.979371091Z" level=info msg="PullImage \"docker.io/kindest/kindnetd:v20250214-acbabc1a\" returns image reference \"sha256:ee75e27fff91c8d59835f9a3efdf968ff404e580bad69746a65bcf3e304ab26f\""
	Mar 17 10:51:34 multinode-286863 containerd[841]: time="2025-03-17T10:51:34.982815655Z" level=info msg="CreateContainer within sandbox \"8378731501b2974912f05aae9cbf8b8317873af22b75f10da8c1f90b2d497f04\" for container &ContainerMetadata{Name:kindnet-cni,Attempt:0,}"
	Mar 17 10:51:35 multinode-286863 containerd[841]: time="2025-03-17T10:51:35.004556047Z" level=info msg="CreateContainer within sandbox \"8378731501b2974912f05aae9cbf8b8317873af22b75f10da8c1f90b2d497f04\" for &ContainerMetadata{Name:kindnet-cni,Attempt:0,} returns container id \"0345da9c7e26329c3ad4d26be1c24ae5f8116018afb7645efee96f4a099f7f05\""
	Mar 17 10:51:35 multinode-286863 containerd[841]: time="2025-03-17T10:51:35.005338571Z" level=info msg="StartContainer for \"0345da9c7e26329c3ad4d26be1c24ae5f8116018afb7645efee96f4a099f7f05\""
	Mar 17 10:51:35 multinode-286863 containerd[841]: time="2025-03-17T10:51:35.147448761Z" level=info msg="StartContainer for \"0345da9c7e26329c3ad4d26be1c24ae5f8116018afb7645efee96f4a099f7f05\" returns successfully"
	Mar 17 10:51:37 multinode-286863 containerd[841]: time="2025-03-17T10:51:37.876880842Z" level=info msg="No cni config template is specified, wait for other system components to drop the config."
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.440559014Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-b6mh9,Uid:c0dfc07b-c06d-4874-8ba4-e8033b09788f,Namespace:kube-system,Attempt:0,}"
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.537989571Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-b6mh9,Uid:c0dfc07b-c06d-4874-8ba4-e8033b09788f,Namespace:kube-system,Attempt:0,} returns sandbox id \"cb5e57c9bf16eec7097f81943bdc77511d1ed34fe895639c99204ed33ffd8ba5\""
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.542043267Z" level=info msg="CreateContainer within sandbox \"cb5e57c9bf16eec7097f81943bdc77511d1ed34fe895639c99204ed33ffd8ba5\" for container &ContainerMetadata{Name:coredns,Attempt:0,}"
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.568148129Z" level=info msg="CreateContainer within sandbox \"cb5e57c9bf16eec7097f81943bdc77511d1ed34fe895639c99204ed33ffd8ba5\" for &ContainerMetadata{Name:coredns,Attempt:0,} returns container id \"ff31fdfd22a182dfcdb8ebfbc8ac0db64266ec0719a44e774510763f3a3f2c80\""
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.569961772Z" level=info msg="StartContainer for \"ff31fdfd22a182dfcdb8ebfbc8ac0db64266ec0719a44e774510763f3a3f2c80\""
	Mar 17 10:51:48 multinode-286863 containerd[841]: time="2025-03-17T10:51:48.634060071Z" level=info msg="StartContainer for \"ff31fdfd22a182dfcdb8ebfbc8ac0db64266ec0719a44e774510763f3a3f2c80\" returns successfully"
	Mar 17 10:52:08 multinode-286863 containerd[841]: time="2025-03-17T10:52:08.762724567Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-58667487b6-6q5tk,Uid:e1052bae-a6b5-4978-89ea-f1ab4442d4d0,Namespace:default,Attempt:0,}"
	Mar 17 10:52:08 multinode-286863 containerd[841]: time="2025-03-17T10:52:08.868244532Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:busybox-58667487b6-6q5tk,Uid:e1052bae-a6b5-4978-89ea-f1ab4442d4d0,Namespace:default,Attempt:0,} returns sandbox id \"c365cec361f1114d2eee394fe4d91bb6185b2b5e2ee6e9abe3e171ad4fa62b13\""
	Mar 17 10:52:08 multinode-286863 containerd[841]: time="2025-03-17T10:52:08.869974398Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\""
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.792905359Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox:1.28\" labels:{key:\"io.cri-containerd.image\" value:\"managed\"}"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.794814883Z" level=info msg="stop pulling image gcr.io/k8s-minikube/busybox:1.28: active requests=0, bytes read=766310"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.797306266Z" level=info msg="ImageCreate event name:\"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\" labels:{key:\"io.cri-containerd.image\" value:\"managed\"}"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.803749052Z" level=info msg="ImageCreate event name:\"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\" labels:{key:\"io.cri-containerd.image\" value:\"managed\"}"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.804771317Z" level=info msg="Pulled image \"gcr.io/k8s-minikube/busybox:1.28\" with image id \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\", repo tag \"gcr.io/k8s-minikube/busybox:1.28\", repo digest \"gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12\", size \"764554\" in 1.93465114s"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.804926933Z" level=info msg="PullImage \"gcr.io/k8s-minikube/busybox:1.28\" returns image reference \"sha256:89a35e2ebb6b938201966889b5e8c85b931db6432c5643966116cd1c28bf45cd\""
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.809257966Z" level=info msg="CreateContainer within sandbox \"c365cec361f1114d2eee394fe4d91bb6185b2b5e2ee6e9abe3e171ad4fa62b13\" for container &ContainerMetadata{Name:busybox,Attempt:0,}"
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.839032737Z" level=info msg="CreateContainer within sandbox \"c365cec361f1114d2eee394fe4d91bb6185b2b5e2ee6e9abe3e171ad4fa62b13\" for &ContainerMetadata{Name:busybox,Attempt:0,} returns container id \"4fcd4500ed69e5b3b7374b671bf7de29418c8c4d3505f4a3f38adcd58358f79e\""
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.841834705Z" level=info msg="StartContainer for \"4fcd4500ed69e5b3b7374b671bf7de29418c8c4d3505f4a3f38adcd58358f79e\""
	Mar 17 10:52:10 multinode-286863 containerd[841]: time="2025-03-17T10:52:10.904275314Z" level=info msg="StartContainer for \"4fcd4500ed69e5b3b7374b671bf7de29418c8c4d3505f4a3f38adcd58358f79e\" returns successfully"
	
	
	==> coredns [ff31fdfd22a182dfcdb8ebfbc8ac0db64266ec0719a44e774510763f3a3f2c80] <==
	[INFO] 10.244.1.2:47184 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 44 0.001574317s
	[INFO] 10.244.0.3:38219 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000112378s
	[INFO] 10.244.0.3:49320 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.002088936s
	[INFO] 10.244.0.3:57633 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000136238s
	[INFO] 10.244.0.3:53734 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000095221s
	[INFO] 10.244.0.3:58280 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001812127s
	[INFO] 10.244.0.3:37382 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000153248s
	[INFO] 10.244.0.3:38712 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000105593s
	[INFO] 10.244.0.3:57522 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000108735s
	[INFO] 10.244.1.2:43010 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000179865s
	[INFO] 10.244.1.2:57947 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001553533s
	[INFO] 10.244.1.2:44889 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000221934s
	[INFO] 10.244.1.2:59960 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000170191s
	[INFO] 10.244.1.2:41575 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 36 0.001295596s
	[INFO] 10.244.1.2:38239 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000208617s
	[INFO] 10.244.1.2:49046 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000158221s
	[INFO] 10.244.1.2:40251 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000130224s
	[INFO] 10.244.0.3:57057 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000085276s
	[INFO] 10.244.0.3:49378 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000114339s
	[INFO] 10.244.0.3:35450 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000105805s
	[INFO] 10.244.0.3:59805 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000050265s
	[INFO] 10.244.1.2:42508 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000140161s
	[INFO] 10.244.1.2:54777 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000170273s
	[INFO] 10.244.1.2:49119 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000127541s
	[INFO] 10.244.1.2:43076 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000091381s
	
	
	==> describe nodes <==
	Name:               multinode-286863
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-286863
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76
	                    minikube.k8s.io/name=multinode-286863
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_03_17T10_51_28_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 17 Mar 2025 10:51:24 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-286863
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 17 Mar 2025 11:03:42 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 17 Mar 2025 11:03:11 +0000   Mon, 17 Mar 2025 10:51:22 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 17 Mar 2025 11:03:11 +0000   Mon, 17 Mar 2025 10:51:22 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 17 Mar 2025 11:03:11 +0000   Mon, 17 Mar 2025 10:51:22 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 17 Mar 2025 11:03:11 +0000   Mon, 17 Mar 2025 10:51:25 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.2
	  Hostname:    multinode-286863
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	System Info:
	  Machine ID:                 799de4db1d3e46ec8a546d5e73433e83
	  System UUID:                4f82cfdf-aa97-4c68-a359-02cc626f660d
	  Boot ID:                    6191d711-482a-47cf-8e52-43bf2fb89a15
	  Kernel Version:             5.15.0-1077-aws
	  OS Image:                   Ubuntu 22.04.5 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.25
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-6q5tk                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         11m
	  kube-system                 coredns-668d6bf9bc-b6mh9                    100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     12m
	  kube-system                 etcd-multinode-286863                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         12m
	  kube-system                 kindnet-krz7g                               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      12m
	  kube-system                 kube-apiserver-multinode-286863             250m (12%)    0 (0%)      0 (0%)           0 (0%)         12m
	  kube-system                 kube-controller-manager-multinode-286863    200m (10%)    0 (0%)      0 (0%)           0 (0%)         12m
	  kube-system                 kube-proxy-9xbpl                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	  kube-system                 kube-scheduler-multinode-286863             100m (5%)     0 (0%)      0 (0%)           0 (0%)         12m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age                From             Message
	  ----     ------                   ----               ----             -------
	  Normal   Starting                 12m                kube-proxy       
	  Normal   NodeHasSufficientMemory  12m (x8 over 12m)  kubelet          Node multinode-286863 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    12m (x8 over 12m)  kubelet          Node multinode-286863 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     12m (x7 over 12m)  kubelet          Node multinode-286863 status is now: NodeHasSufficientPID
	  Normal   NodeAllocatableEnforced  12m                kubelet          Updated Node Allocatable limit across pods
	  Normal   Starting                 12m                kubelet          Starting kubelet.
	  Warning  CgroupV1                 12m                kubelet          cgroup v1 support is in maintenance mode, please migrate to cgroup v2
	  Normal   NodeAllocatableEnforced  12m                kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  12m                kubelet          Node multinode-286863 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    12m                kubelet          Node multinode-286863 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     12m                kubelet          Node multinode-286863 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           12m                node-controller  Node multinode-286863 event: Registered Node multinode-286863 in Controller
	
	
	Name:               multinode-286863-m02
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=multinode-286863-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76
	                    minikube.k8s.io/name=multinode-286863
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_03_17T10_52_03_0700
	                    minikube.k8s.io/version=v1.35.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 17 Mar 2025 10:52:02 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-286863-m02
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 17 Mar 2025 11:03:35 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 17 Mar 2025 11:03:35 +0000   Mon, 17 Mar 2025 10:52:02 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 17 Mar 2025 11:03:35 +0000   Mon, 17 Mar 2025 10:52:02 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 17 Mar 2025 11:03:35 +0000   Mon, 17 Mar 2025 10:52:02 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 17 Mar 2025 11:03:35 +0000   Mon, 17 Mar 2025 10:52:04 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.67.3
	  Hostname:    multinode-286863-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	System Info:
	  Machine ID:                 3e79e5134f50430da44fbd751f043b9b
	  System UUID:                ca7c7c94-f058-481f-ae53-ed2c78b99cfc
	  Boot ID:                    6191d711-482a-47cf-8e52-43bf2fb89a15
	  Kernel Version:             5.15.0-1077-aws
	  OS Image:                   Ubuntu 22.04.5 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.25
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-kn9lf    0 (0%)        0 (0%)      0 (0%)           0 (0%)         11m
	  kube-system                 kindnet-m64f7               100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      11m
	  kube-system                 kube-proxy-mg9hg            0 (0%)        0 (0%)      0 (0%)           0 (0%)         11m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (0%)  50Mi (0%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-1Gi      0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	  hugepages-32Mi     0 (0%)     0 (0%)
	  hugepages-64Ki     0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 11m                kube-proxy       
	  Normal  NodeHasSufficientMemory  11m (x2 over 11m)  kubelet          Node multinode-286863-m02 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    11m (x2 over 11m)  kubelet          Node multinode-286863-m02 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     11m (x2 over 11m)  kubelet          Node multinode-286863-m02 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  11m                kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeReady                11m                kubelet          Node multinode-286863-m02 status is now: NodeReady
	  Normal  RegisteredNode           11m                node-controller  Node multinode-286863-m02 event: Registered Node multinode-286863-m02 in Controller
	
	
	==> dmesg <==
	[Mar17 10:17] ACPI: SRAT not present
	[  +0.000000] ACPI: SRAT not present
	[  +0.000000] SPI driver altr_a10sr has no spi_device_id for altr,a10sr
	[  +0.014407] device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log.
	[  +0.504211] systemd[1]: Configuration file /run/systemd/system/netplan-ovs-cleanup.service is marked world-inaccessible. This has no effect as configuration data is accessible via APIs without restrictions. Proceeding anyway.
	[  +0.033838] systemd[1]: /lib/systemd/system/snapd.service:23: Unknown key name 'RestartMode' in section 'Service', ignoring.
	[  +0.824746] ena 0000:00:05.0: LLQ is not supported Fallback to host mode policy.
	[  +6.163432] kauditd_printk_skb: 36 callbacks suppressed
	
	
	==> etcd [e85f28ee94c94d59f13002251599d81ba6986638f27688debef638ae5419e0eb] <==
	{"level":"info","ts":"2025-03-17T10:51:21.507072Z","caller":"embed/etcd.go:280","msg":"now serving peer/client/metrics","local-member-id":"8688e899f7831fc7","initial-advertise-peer-urls":["https://192.168.67.2:2380"],"listen-peer-urls":["https://192.168.67.2:2380"],"advertise-client-urls":["https://192.168.67.2:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://192.168.67.2:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2025-03-17T10:51:21.507127Z","caller":"embed/etcd.go:871","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2025-03-17T10:51:21.954938Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 is starting a new election at term 1"}
	{"level":"info","ts":"2025-03-17T10:51:21.955022Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became pre-candidate at term 1"}
	{"level":"info","ts":"2025-03-17T10:51:21.955185Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgPreVoteResp from 8688e899f7831fc7 at term 1"}
	{"level":"info","ts":"2025-03-17T10:51:21.955212Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became candidate at term 2"}
	{"level":"info","ts":"2025-03-17T10:51:21.955290Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 received MsgVoteResp from 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2025-03-17T10:51:21.955363Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8688e899f7831fc7 became leader at term 2"}
	{"level":"info","ts":"2025-03-17T10:51:21.955384Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 8688e899f7831fc7 elected leader 8688e899f7831fc7 at term 2"}
	{"level":"info","ts":"2025-03-17T10:51:21.959026Z","caller":"etcdserver/server.go:2651","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T10:51:21.964955Z","caller":"etcdserver/server.go:2140","msg":"published local member to cluster through raft","local-member-id":"8688e899f7831fc7","local-member-attributes":"{Name:multinode-286863 ClientURLs:[https://192.168.67.2:2379]}","request-path":"/0/members/8688e899f7831fc7/attributes","cluster-id":"9d8fdeb88b6def78","publish-timeout":"7s"}
	{"level":"info","ts":"2025-03-17T10:51:21.965114Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-03-17T10:51:21.965391Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-03-17T10:51:21.965508Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2025-03-17T10:51:21.965523Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-03-17T10:51:21.966045Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-03-17T10:51:21.966806Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.67.2:2379"}
	{"level":"info","ts":"2025-03-17T10:51:21.967247Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-03-17T10:51:21.967898Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-03-17T10:51:21.981081Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"9d8fdeb88b6def78","local-member-id":"8688e899f7831fc7","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T10:51:21.981190Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T10:51:21.981226Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T11:01:22.148137Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":837}
	{"level":"info","ts":"2025-03-17T11:01:22.156761Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":837,"took":"8.351726ms","hash":2521836519,"current-db-size-bytes":2580480,"current-db-size":"2.6 MB","current-db-size-in-use-bytes":2580480,"current-db-size-in-use":"2.6 MB"}
	{"level":"info","ts":"2025-03-17T11:01:22.156837Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":2521836519,"revision":837,"compact-revision":-1}
	
	
	==> kernel <==
	 11:03:44 up 46 min,  0 users,  load average: 0.41, 0.43, 1.17
	Linux multinode-286863 5.15.0-1077-aws #84~20.04.1-Ubuntu SMP Mon Jan 20 22:14:27 UTC 2025 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.5 LTS"
	
	
	==> kindnet [0345da9c7e26329c3ad4d26be1c24ae5f8116018afb7645efee96f4a099f7f05] <==
	I0317 11:02:35.628717       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:02:45.627777       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:02:45.627813       1 main.go:301] handling current node
	I0317 11:02:45.627830       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:02:45.627838       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:02:55.627887       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:02:55.627926       1 main.go:301] handling current node
	I0317 11:02:55.627944       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:02:55.627950       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:03:05.628817       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:03:05.628856       1 main.go:301] handling current node
	I0317 11:03:05.628876       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:03:05.628883       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:03:15.627792       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:03:15.627829       1 main.go:301] handling current node
	I0317 11:03:15.627845       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:03:15.627851       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:03:25.628048       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:03:25.628091       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	I0317 11:03:25.628600       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:03:25.628627       1 main.go:301] handling current node
	I0317 11:03:35.628639       1 main.go:297] Handling node with IPs: map[192.168.67.2:{}]
	I0317 11:03:35.628676       1 main.go:301] handling current node
	I0317 11:03:35.628693       1 main.go:297] Handling node with IPs: map[192.168.67.3:{}]
	I0317 11:03:35.628700       1 main.go:324] Node multinode-286863-m02 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [0224d523c6c15863a6dbd8bf493c53872bfaa4fd93a2f9bf76cd68b27e00fa20] <==
	I0317 10:51:25.000699       1 policy_source.go:240] refreshing policies
	E0317 10:51:25.014502       1 controller.go:145] "Failed to ensure lease exists, will retry" err="namespaces \"kube-system\" not found" interval="200ms"
	I0317 10:51:25.032360       1 shared_informer.go:320] Caches are synced for configmaps
	I0317 10:51:25.077511       1 controller.go:615] quota admission added evaluator for: namespaces
	I0317 10:51:25.220630       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0317 10:51:25.656738       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0317 10:51:25.668262       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0317 10:51:25.668287       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0317 10:51:26.422038       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0317 10:51:26.481681       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0317 10:51:26.543895       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0317 10:51:26.551665       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.67.2]
	I0317 10:51:26.552955       1 controller.go:615] quota admission added evaluator for: endpoints
	I0317 10:51:26.558108       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0317 10:51:26.957450       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0317 10:51:27.475192       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0317 10:51:27.489372       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0317 10:51:27.505292       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0317 10:51:32.310148       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0317 10:51:32.511753       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0317 11:03:42.133361       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:46208: use of closed network connection
	E0317 11:03:42.361477       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:46224: use of closed network connection
	E0317 11:03:42.582102       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:46250: use of closed network connection
	E0317 11:03:42.803881       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:46268: use of closed network connection
	E0317 11:03:43.012657       1 conn.go:339] Error on socket receive: read tcp 192.168.67.2:8443->192.168.67.1:46284: use of closed network connection
	
	
	==> kube-controller-manager [8ca23ed56ddd0d0edf803790f7da02b8dc7db65b5681659ef6252b268d39bb26] <==
	I0317 10:52:02.827256       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:02.827340       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:02.841122       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:03.063921       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:03.365655       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:04.245055       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:04.245385       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-286863-m02"
	I0317 10:52:04.256525       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:52:06.517436       1 node_lifecycle_controller.go:886] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-286863-m02"
	I0317 10:52:07.538760       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="51.826962ms"
	I0317 10:52:07.550848       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="12.034791ms"
	I0317 10:52:07.551752       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="37.054µs"
	I0317 10:52:07.564403       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="64.525µs"
	I0317 10:52:07.568750       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="51.371µs"
	I0317 10:52:11.633050       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="5.572158ms"
	I0317 10:52:11.633365       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="54.875µs"
	I0317 10:52:28.810148       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863"
	I0317 10:57:07.708663       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 10:58:04.841498       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863"
	I0317 11:02:13.001701       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 11:03:04.101971       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	I0317 11:03:10.825153       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="9.233562ms"
	I0317 11:03:10.825240       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="43.069µs"
	I0317 11:03:11.066182       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863"
	I0317 11:03:35.027807       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-286863-m02"
	
	
	==> kube-proxy [cd66844fb47238ec8b6cc543e7d70ab38456e72c10b738f2a68ab46629ed3e86] <==
	I0317 10:51:33.362837       1 server_linux.go:66] "Using iptables proxy"
	I0317 10:51:33.518234       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["192.168.67.2"]
	E0317 10:51:33.518331       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0317 10:51:33.580655       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0317 10:51:33.580722       1 server_linux.go:170] "Using iptables Proxier"
	I0317 10:51:33.582975       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0317 10:51:33.583307       1 server.go:497] "Version info" version="v1.32.2"
	I0317 10:51:33.583321       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0317 10:51:33.587982       1 config.go:199] "Starting service config controller"
	I0317 10:51:33.588021       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0317 10:51:33.588049       1 config.go:105] "Starting endpoint slice config controller"
	I0317 10:51:33.588054       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0317 10:51:33.588708       1 config.go:329] "Starting node config controller"
	I0317 10:51:33.588722       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0317 10:51:33.688179       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0317 10:51:33.688227       1 shared_informer.go:320] Caches are synced for service config
	I0317 10:51:33.690809       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [10345579e4f7c8b2488bc000d5979c82e7b7949dd2ec8c7c04a5c9e23c6a1e47] <==
	W0317 10:51:26.060676       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0317 10:51:26.060699       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.060763       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0317 10:51:26.060785       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.060856       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0317 10:51:26.060876       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061136       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0317 10:51:26.061162       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061209       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0317 10:51:26.061240       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061305       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0317 10:51:26.061327       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061402       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0317 10:51:26.061420       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061493       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0317 10:51:26.061516       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.061581       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0317 10:51:26.061600       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.062049       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0317 10:51:26.062083       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.062125       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0317 10:51:26.062148       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 10:51:26.065601       1 reflector.go:569] runtime/asm_arm64.s:1223: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0317 10:51:26.065655       1 reflector.go:166] "Unhandled Error" err="runtime/asm_arm64.s:1223: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0317 10:51:27.250923       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: E0317 10:51:33.155634    1528 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-cvx2q_kube-system(3fffb8f7-7f53-4405-9cf0-d214d0a940b5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-cvx2q_kube-system(3fffb8f7-7f53-4405-9cf0-d214d0a940b5)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"4d40a2ced9abe0ea07bb8ebe5de413a12540646b7aa71fc355c82a0977d67113\\\": failed to find network info for sandbox \\\"4d40a2ced9abe0ea07bb8ebe5de413a12540646b7aa71fc355c82a0977d67113\\\"\"" pod="kube-system/coredns-668d6bf9bc-cvx2q" podUID="3fffb8f7-7f53-4405-9cf0-d214d0a940b5"
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: E0317 10:51:33.163021    1528 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\": failed to find network info for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\""
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: E0317 10:51:33.163091    1528 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\": failed to find network info for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\"" pod="kube-system/coredns-668d6bf9bc-b6mh9"
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: E0317 10:51:33.163113    1528 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\": failed to find network info for sandbox \"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\"" pod="kube-system/coredns-668d6bf9bc-b6mh9"
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: E0317 10:51:33.163174    1528 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-b6mh9_kube-system(c0dfc07b-c06d-4874-8ba4-e8033b09788f)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-b6mh9_kube-system(c0dfc07b-c06d-4874-8ba4-e8033b09788f)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\\\": failed to find network info for sandbox \\\"843f7b9f059e23599d747062aa94bb0ba6ae6d479b7f66df9de2749876557874\\\"\"" pod="kube-system/coredns-668d6bf9bc-b6mh9" podUID="c0dfc07b-c06d-4874-8ba4-e8033b09788f"
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.567470    1528 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kube-proxy-9xbpl" podStartSLOduration=1.567450424 podStartE2EDuration="1.567450424s" podCreationTimestamp="2025-03-17 10:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-03-17 10:51:33.567308827 +0000 UTC m=+6.284003592" watchObservedRunningTime="2025-03-17 10:51:33.567450424 +0000 UTC m=+6.284145165"
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.873056    1528 reconciler_common.go:162] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-config-volume\") pod \"3fffb8f7-7f53-4405-9cf0-d214d0a940b5\" (UID: \"3fffb8f7-7f53-4405-9cf0-d214d0a940b5\") "
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.873228    1528 reconciler_common.go:162] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwd7s\" (UniqueName: \"kubernetes.io/projected/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-kube-api-access-xwd7s\") pod \"3fffb8f7-7f53-4405-9cf0-d214d0a940b5\" (UID: \"3fffb8f7-7f53-4405-9cf0-d214d0a940b5\") "
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.873833    1528 operation_generator.go:780] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-config-volume" (OuterVolumeSpecName: "config-volume") pod "3fffb8f7-7f53-4405-9cf0-d214d0a940b5" (UID: "3fffb8f7-7f53-4405-9cf0-d214d0a940b5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGIDValue ""
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.875904    1528 operation_generator.go:780] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-kube-api-access-xwd7s" (OuterVolumeSpecName: "kube-api-access-xwd7s") pod "3fffb8f7-7f53-4405-9cf0-d214d0a940b5" (UID: "3fffb8f7-7f53-4405-9cf0-d214d0a940b5"). InnerVolumeSpecName "kube-api-access-xwd7s". PluginName "kubernetes.io/projected", VolumeGIDValue ""
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.974178    1528 reconciler_common.go:299] "Volume detached for volume \"kube-api-access-xwd7s\" (UniqueName: \"kubernetes.io/projected/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-kube-api-access-xwd7s\") on node \"multinode-286863\" DevicePath \"\""
	Mar 17 10:51:33 multinode-286863 kubelet[1528]: I0317 10:51:33.974222    1528 reconciler_common.go:299] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3fffb8f7-7f53-4405-9cf0-d214d0a940b5-config-volume\") on node \"multinode-286863\" DevicePath \"\""
	Mar 17 10:51:34 multinode-286863 kubelet[1528]: I0317 10:51:34.074532    1528 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/c3aadd96-8f0b-4481-af0f-a6d8a264b0ef-tmp\") pod \"storage-provisioner\" (UID: \"c3aadd96-8f0b-4481-af0f-a6d8a264b0ef\") " pod="kube-system/storage-provisioner"
	Mar 17 10:51:34 multinode-286863 kubelet[1528]: I0317 10:51:34.074592    1528 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p67vh\" (UniqueName: \"kubernetes.io/projected/c3aadd96-8f0b-4481-af0f-a6d8a264b0ef-kube-api-access-p67vh\") pod \"storage-provisioner\" (UID: \"c3aadd96-8f0b-4481-af0f-a6d8a264b0ef\") " pod="kube-system/storage-provisioner"
	Mar 17 10:51:35 multinode-286863 kubelet[1528]: I0317 10:51:35.442377    1528 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fffb8f7-7f53-4405-9cf0-d214d0a940b5" path="/var/lib/kubelet/pods/3fffb8f7-7f53-4405-9cf0-d214d0a940b5/volumes"
	Mar 17 10:51:35 multinode-286863 kubelet[1528]: I0317 10:51:35.567988    1528 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/kindnet-krz7g" podStartSLOduration=1.787041817 podStartE2EDuration="3.567968122s" podCreationTimestamp="2025-03-17 10:51:32 +0000 UTC" firstStartedPulling="2025-03-17 10:51:33.199166173 +0000 UTC m=+5.915860914" lastFinishedPulling="2025-03-17 10:51:34.980092478 +0000 UTC m=+7.696787219" observedRunningTime="2025-03-17 10:51:35.552393395 +0000 UTC m=+8.269088152" watchObservedRunningTime="2025-03-17 10:51:35.567968122 +0000 UTC m=+8.284662863"
	Mar 17 10:51:37 multinode-286863 kubelet[1528]: I0317 10:51:37.771274    1528 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=4.771253432 podStartE2EDuration="4.771253432s" podCreationTimestamp="2025-03-17 10:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-03-17 10:51:35.569214283 +0000 UTC m=+8.285909032" watchObservedRunningTime="2025-03-17 10:51:37.771253432 +0000 UTC m=+10.487948173"
	Mar 17 10:51:37 multinode-286863 kubelet[1528]: I0317 10:51:37.876303    1528 kuberuntime_manager.go:1702] "Updating runtime config through cri with podcidr" CIDR="10.244.0.0/24"
	Mar 17 10:51:37 multinode-286863 kubelet[1528]: I0317 10:51:37.877141    1528 kubelet_network.go:61] "Updating Pod CIDR" originalPodCIDR="" newPodCIDR="10.244.0.0/24"
	Mar 17 10:51:49 multinode-286863 kubelet[1528]: I0317 10:51:49.599275    1528 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-668d6bf9bc-b6mh9" podStartSLOduration=17.599252710000002 podStartE2EDuration="17.59925271s" podCreationTimestamp="2025-03-17 10:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-03-17 10:51:49.580779288 +0000 UTC m=+22.297474037" watchObservedRunningTime="2025-03-17 10:51:49.59925271 +0000 UTC m=+22.315947451"
	Mar 17 10:52:07 multinode-286863 kubelet[1528]: W0317 10:52:07.565749    1528 reflector.go:569] object-"default"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:multinode-286863" cannot list resource "configmaps" in API group "" in the namespace "default": no relationship found between node 'multinode-286863' and this object
	Mar 17 10:52:07 multinode-286863 kubelet[1528]: E0317 10:52:07.565803    1528 reflector.go:166] "Unhandled Error" err="object-\"default\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:multinode-286863\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"default\": no relationship found between node 'multinode-286863' and this object" logger="UnhandledError"
	Mar 17 10:52:07 multinode-286863 kubelet[1528]: I0317 10:52:07.565954    1528 status_manager.go:890] "Failed to get status for pod" podUID="e1052bae-a6b5-4978-89ea-f1ab4442d4d0" pod="default/busybox-58667487b6-6q5tk" err="pods \"busybox-58667487b6-6q5tk\" is forbidden: User \"system:node:multinode-286863\" cannot get resource \"pods\" in API group \"\" in the namespace \"default\": no relationship found between node 'multinode-286863' and this object"
	Mar 17 10:52:07 multinode-286863 kubelet[1528]: I0317 10:52:07.634852    1528 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd4l6\" (UniqueName: \"kubernetes.io/projected/e1052bae-a6b5-4978-89ea-f1ab4442d4d0-kube-api-access-gd4l6\") pod \"busybox-58667487b6-6q5tk\" (UID: \"e1052bae-a6b5-4978-89ea-f1ab4442d4d0\") " pod="default/busybox-58667487b6-6q5tk"
	Mar 17 11:03:42 multinode-286863 kubelet[1528]: E0317 11:03:42.364267    1528 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.67.2:48606->192.168.67.2:10010: write tcp 192.168.67.2:48606->192.168.67.2:10010: write: broken pipe
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p multinode-286863 -n multinode-286863
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-286863 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiNode/serial/DeployApp2Nodes FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/DeployApp2Nodes (698.78s)
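
For a by-hand follow-up on this failure, the same kubectl wrapper used throughout the test can show where the two busybox replicas landed and why the rollout stalled at 1 of 2 available. A minimal sketch, assuming the multinode-286863 profile is still running; these are plain kubectl commands, not part of the test itself, and busybox-58667487b6-kn9lf is the m02 replica from the node listing above (the likely owner of the missing second pod IP):

	out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o wide
	out/minikube-linux-arm64 kubectl -p multinode-286863 -- describe pod busybox-58667487b6-kn9lf

The Events section of the describe output would surface any image-pull or sandbox errors specific to the m02 node.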

TestPause/serial/Start (632.82s)

=== RUN   TestPause/serial/Start
pause_test.go:80: (dbg) Run:  out/minikube-linux-arm64 start -p pause-600904 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd
pause_test.go:80: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p pause-600904 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd: exit status 80 (10m30.635738105s)

-- stdout --
	* [pause-600904] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on user configuration
	* Using Docker driver with root privileges
	* Starting "pause-600904" primary control-plane node in "pause-600904" cluster
	* Pulling base image v0.0.46-1741860993-20523 ...
	* Creating docker container (CPUs=2, Memory=2048MB) ...
	* Preparing Kubernetes v1.32.2 on containerd 1.7.25 ...
	  - Generating certificates and keys ...
	  - Booting up control plane ...
	  - Configuring RBAC rules ...
	* Configuring CNI (Container Networking Interface) ...
	* Verifying Kubernetes components...
	
	

-- /stdout --
** stderr ** 
	E0317 11:18:30.505000  191740 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-668d6bf9bc-4wz9h" in "kube-system" namespace (skipping!): pods "coredns-668d6bf9bc-4wz9h" not found
	E0317 11:22:30.510174  191740 pod_ready.go:67] WaitExtra: waitPodCondition: context deadline exceeded
	X Problems detected in kubelet:
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: W0317 11:18:27.794183    1566 reflector.go:569] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:pause-600904" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'pause-600904' and this object
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: E0317 11:18:27.794253    1566 reflector.go:166] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:pause-600904\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'pause-600904' and this object" logger="UnhandledError"
	X Problems detected in kubelet:
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: W0317 11:18:27.794183    1566 reflector.go:569] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:pause-600904" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'pause-600904' and this object
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: E0317 11:18:27.794253    1566 reflector.go:166] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:pause-600904\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'pause-600904' and this object" logger="UnhandledError"
	X Problems detected in kubelet:
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: W0317 11:18:27.794183    1566 reflector.go:569] object-"kube-system"/"kube-proxy": failed to list *v1.ConfigMap: configmaps "kube-proxy" is forbidden: User "system:node:pause-600904" cannot list resource "configmaps" in API group "" in the namespace "kube-system": no relationship found between node 'pause-600904' and this object
	  Mar 17 11:18:27 pause-600904 kubelet[1566]: E0317 11:18:27.794253    1566 reflector.go:166] "Unhandled Error" err="object-\"kube-system\"/\"kube-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-proxy\" is forbidden: User \"system:node:pause-600904\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\": no relationship found between node 'pause-600904' and this object" logger="UnhandledError"
	X Exiting due to GUEST_START: failed to start node: wait 6m0s for node: waiting for apps_running: expected k8s-apps: missing components: kube-dns
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

** /stderr **
pause_test.go:82: failed to start minikube with args: "out/minikube-linux-arm64 start -p pause-600904 --memory=2048 --install-addons=false --wait=all --driver=docker  --container-runtime=containerd" : exit status 80
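
For a rerun outside the harness, the same start command can be repeated with minikube's client-side debug logging turned up. A sketch only; the --alsologtostderr -v=5 flags are borrowed from the force-systemd-env-194349 invocation in the audit table below and are not part of the original test args:

	out/minikube-linux-arm64 start -p pause-600904 --memory=2048 \
	  --install-addons=false --wait=all --driver=docker \
	  --container-runtime=containerd --alsologtostderr -v=5

This keeps the failing configuration intact and only changes how much of the GUEST_START wait gets logged.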
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:230: ======>  post-mortem[TestPause/serial/Start]: docker inspect <======
helpers_test.go:231: (dbg) Run:  docker inspect pause-600904
helpers_test.go:235: (dbg) docker inspect pause-600904:

-- stdout --
	[
	    {
	        "Id": "03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d",
	        "Created": "2025-03-17T11:17:55.703261058Z",
	        "Path": "/usr/local/bin/entrypoint",
	        "Args": [
	            "/sbin/init"
	        ],
	        "State": {
	            "Status": "running",
	            "Running": true,
	            "Paused": false,
	            "Restarting": false,
	            "OOMKilled": false,
	            "Dead": false,
	            "Pid": 192301,
	            "ExitCode": 0,
	            "Error": "",
	            "StartedAt": "2025-03-17T11:17:55.762860202Z",
	            "FinishedAt": "0001-01-01T00:00:00Z"
	        },
	        "Image": "sha256:df0c2544fb3106b890f0a9ab81fcf49f97edb092b83e47f42288ad5dfe1f4b40",
	        "ResolvConfPath": "/var/lib/docker/containers/03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d/resolv.conf",
	        "HostnamePath": "/var/lib/docker/containers/03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d/hostname",
	        "HostsPath": "/var/lib/docker/containers/03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d/hosts",
	        "LogPath": "/var/lib/docker/containers/03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d/03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d-json.log",
	        "Name": "/pause-600904",
	        "RestartCount": 0,
	        "Driver": "overlay2",
	        "Platform": "linux",
	        "MountLabel": "",
	        "ProcessLabel": "",
	        "AppArmorProfile": "unconfined",
	        "ExecIDs": null,
	        "HostConfig": {
	            "Binds": [
	                "/lib/modules:/lib/modules:ro",
	                "pause-600904:/var"
	            ],
	            "ContainerIDFile": "",
	            "LogConfig": {
	                "Type": "json-file",
	                "Config": {}
	            },
	            "NetworkMode": "pause-600904",
	            "PortBindings": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": ""
	                    }
	                ]
	            },
	            "RestartPolicy": {
	                "Name": "no",
	                "MaximumRetryCount": 0
	            },
	            "AutoRemove": false,
	            "VolumeDriver": "",
	            "VolumesFrom": null,
	            "ConsoleSize": [
	                0,
	                0
	            ],
	            "CapAdd": null,
	            "CapDrop": null,
	            "CgroupnsMode": "host",
	            "Dns": [],
	            "DnsOptions": [],
	            "DnsSearch": [],
	            "ExtraHosts": null,
	            "GroupAdd": null,
	            "IpcMode": "private",
	            "Cgroup": "",
	            "Links": null,
	            "OomScoreAdj": 0,
	            "PidMode": "",
	            "Privileged": true,
	            "PublishAllPorts": false,
	            "ReadonlyRootfs": false,
	            "SecurityOpt": [
	                "seccomp=unconfined",
	                "apparmor=unconfined",
	                "label=disable"
	            ],
	            "Tmpfs": {
	                "/run": "",
	                "/tmp": ""
	            },
	            "UTSMode": "",
	            "UsernsMode": "",
	            "ShmSize": 67108864,
	            "Runtime": "runc",
	            "Isolation": "",
	            "CpuShares": 0,
	            "Memory": 2147483648,
	            "NanoCpus": 2000000000,
	            "CgroupParent": "",
	            "BlkioWeight": 0,
	            "BlkioWeightDevice": [],
	            "BlkioDeviceReadBps": [],
	            "BlkioDeviceWriteBps": [],
	            "BlkioDeviceReadIOps": [],
	            "BlkioDeviceWriteIOps": [],
	            "CpuPeriod": 0,
	            "CpuQuota": 0,
	            "CpuRealtimePeriod": 0,
	            "CpuRealtimeRuntime": 0,
	            "CpusetCpus": "",
	            "CpusetMems": "",
	            "Devices": [],
	            "DeviceCgroupRules": null,
	            "DeviceRequests": null,
	            "MemoryReservation": 0,
	            "MemorySwap": 4294967296,
	            "MemorySwappiness": null,
	            "OomKillDisable": false,
	            "PidsLimit": null,
	            "Ulimits": [],
	            "CpuCount": 0,
	            "CpuPercent": 0,
	            "IOMaximumIOps": 0,
	            "IOMaximumBandwidth": 0,
	            "MaskedPaths": null,
	            "ReadonlyPaths": null
	        },
	        "GraphDriver": {
	            "Data": {
	                "ID": "03aed43efeaf424ea5d003029c12ab26316fce01db00bf61ed494a0126c3144d",
	                "LowerDir": "/var/lib/docker/overlay2/7d582bf285f4584f369ab17eaf016d97f40bb7a77dc7e2892fdab5f394607b27-init/diff:/var/lib/docker/overlay2/c96583d021b8dd172f2992413e93a0a3e28934ab88e8d005e42772deac52d50d/diff",
	                "MergedDir": "/var/lib/docker/overlay2/7d582bf285f4584f369ab17eaf016d97f40bb7a77dc7e2892fdab5f394607b27/merged",
	                "UpperDir": "/var/lib/docker/overlay2/7d582bf285f4584f369ab17eaf016d97f40bb7a77dc7e2892fdab5f394607b27/diff",
	                "WorkDir": "/var/lib/docker/overlay2/7d582bf285f4584f369ab17eaf016d97f40bb7a77dc7e2892fdab5f394607b27/work"
	            },
	            "Name": "overlay2"
	        },
	        "Mounts": [
	            {
	                "Type": "bind",
	                "Source": "/lib/modules",
	                "Destination": "/lib/modules",
	                "Mode": "ro",
	                "RW": false,
	                "Propagation": "rprivate"
	            },
	            {
	                "Type": "volume",
	                "Name": "pause-600904",
	                "Source": "/var/lib/docker/volumes/pause-600904/_data",
	                "Destination": "/var",
	                "Driver": "local",
	                "Mode": "z",
	                "RW": true,
	                "Propagation": ""
	            }
	        ],
	        "Config": {
	            "Hostname": "pause-600904",
	            "Domainname": "",
	            "User": "",
	            "AttachStdin": false,
	            "AttachStdout": false,
	            "AttachStderr": false,
	            "ExposedPorts": {
	                "22/tcp": {},
	                "2376/tcp": {},
	                "32443/tcp": {},
	                "5000/tcp": {},
	                "8443/tcp": {}
	            },
	            "Tty": true,
	            "OpenStdin": false,
	            "StdinOnce": false,
	            "Env": [
	                "container=docker",
	                "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
	            ],
	            "Cmd": null,
	            "Image": "gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185",
	            "Volumes": null,
	            "WorkingDir": "/",
	            "Entrypoint": [
	                "/usr/local/bin/entrypoint",
	                "/sbin/init"
	            ],
	            "OnBuild": null,
	            "Labels": {
	                "created_by.minikube.sigs.k8s.io": "true",
	                "mode.minikube.sigs.k8s.io": "pause-600904",
	                "name.minikube.sigs.k8s.io": "pause-600904",
	                "role.minikube.sigs.k8s.io": ""
	            },
	            "StopSignal": "SIGRTMIN+3"
	        },
	        "NetworkSettings": {
	            "Bridge": "",
	            "SandboxID": "ee1235293def800571481385e388ffa221c2867f44d8eb060858f7723e63f36e",
	            "SandboxKey": "/var/run/docker/netns/ee1235293def",
	            "Ports": {
	                "22/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33028"
	                    }
	                ],
	                "2376/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33029"
	                    }
	                ],
	                "32443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33032"
	                    }
	                ],
	                "5000/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33030"
	                    }
	                ],
	                "8443/tcp": [
	                    {
	                        "HostIp": "127.0.0.1",
	                        "HostPort": "33031"
	                    }
	                ]
	            },
	            "HairpinMode": false,
	            "LinkLocalIPv6Address": "",
	            "LinkLocalIPv6PrefixLen": 0,
	            "SecondaryIPAddresses": null,
	            "SecondaryIPv6Addresses": null,
	            "EndpointID": "",
	            "Gateway": "",
	            "GlobalIPv6Address": "",
	            "GlobalIPv6PrefixLen": 0,
	            "IPAddress": "",
	            "IPPrefixLen": 0,
	            "IPv6Gateway": "",
	            "MacAddress": "",
	            "Networks": {
	                "pause-600904": {
	                    "IPAMConfig": {
	                        "IPv4Address": "192.168.85.2"
	                    },
	                    "Links": null,
	                    "Aliases": null,
	                    "MacAddress": "52:18:d6:0d:61:0b",
	                    "DriverOpts": null,
	                    "GwPriority": 0,
	                    "NetworkID": "04efb0d9b3eebf8a18c5f36b50fc41975f78e6f48d3f0330405621212e88ed0a",
	                    "EndpointID": "60e7b87ac900ed7f662b3994df81abed64c77838f2c410ad18d7b78081489764",
	                    "Gateway": "192.168.85.1",
	                    "IPAddress": "192.168.85.2",
	                    "IPPrefixLen": 24,
	                    "IPv6Gateway": "",
	                    "GlobalIPv6Address": "",
	                    "GlobalIPv6PrefixLen": 0,
	                    "DNSNames": [
	                        "pause-600904",
	                        "03aed43efeaf"
	                    ]
	                }
	            }
	        }
	    }
	]

-- /stdout --
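Most of the inspect dump above is boilerplate; for a GUEST_START failure the fields that matter are the container state and the host port forwarded to the apiserver's 8443. A jq one-liner can trim it to just those, assuming jq is available on the host (it is not used by the test harness itself):

	docker inspect pause-600904 | \
	  jq '.[0] | {state: .State.Status, apiserver: .NetworkSettings.Ports["8443/tcp"]}'

Here that yields a running container with 8443 published on 127.0.0.1:33031, consistent with the GUEST_START error above pointing at kube-dns inside the guest rather than at the container itself.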
helpers_test.go:239: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p pause-600904 -n pause-600904
helpers_test.go:244: <<< TestPause/serial/Start FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestPause/serial/Start]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-linux-arm64 -p pause-600904 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-linux-arm64 -p pause-600904 logs -n 25: (1.18708051s)
helpers_test.go:252: TestPause/serial/Start logs: 
-- stdout --
	
	==> Audit <==
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| Command |                         Args                         |         Profile          |  User   | Version |     Start Time      |      End Time       |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | systemctl cat cri-docker                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo cat                            | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | /etc/systemd/system/cri-docker.service.d/10-cni.conf |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo cat                            | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | /usr/lib/systemd/system/cri-docker.service           |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | cri-dockerd --version                                |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | systemctl status containerd                          |                          |         |         |                     |                     |
	|         | --all --full --no-pager                              |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | systemctl cat containerd                             |                          |         |         |                     |                     |
	|         | --no-pager                                           |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo cat                            | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | /lib/systemd/system/containerd.service               |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo cat                            | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | /etc/containerd/config.toml                          |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | containerd config dump                               |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | systemctl status crio --all                          |                          |         |         |                     |                     |
	|         | --full --no-pager                                    |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo                                | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | systemctl cat crio --no-pager                        |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo find                           | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | /etc/crio -type f -exec sh -c                        |                          |         |         |                     |                     |
	|         | 'echo {}; cat {}' \;                                 |                          |         |         |                     |                     |
	| ssh     | -p cilium-668750 sudo crio                           | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC |                     |
	|         | config                                               |                          |         |         |                     |                     |
	| delete  | -p cilium-668750                                     | cilium-668750            | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC | 17 Mar 25 11:19 UTC |
	| start   | -p force-systemd-env-194349                          | force-systemd-env-194349 | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC | 17 Mar 25 11:19 UTC |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --alsologtostderr                                    |                          |         |         |                     |                     |
	|         | -v=5 --driver=docker                                 |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	| ssh     | force-systemd-env-194349                             | force-systemd-env-194349 | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC | 17 Mar 25 11:19 UTC |
	|         | ssh cat                                              |                          |         |         |                     |                     |
	|         | /etc/containerd/config.toml                          |                          |         |         |                     |                     |
	| delete  | -p force-systemd-env-194349                          | force-systemd-env-194349 | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC | 17 Mar 25 11:19 UTC |
	| start   | -p cert-expiration-111228                            | cert-expiration-111228   | jenkins | v1.35.0 | 17 Mar 25 11:19 UTC | 17 Mar 25 11:20 UTC |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --cert-expiration=3m                                 |                          |         |         |                     |                     |
	|         | --driver=docker                                      |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	| start   | -p cert-expiration-111228                            | cert-expiration-111228   | jenkins | v1.35.0 | 17 Mar 25 11:23 UTC | 17 Mar 25 11:23 UTC |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --cert-expiration=8760h                              |                          |         |         |                     |                     |
	|         | --driver=docker                                      |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	| delete  | -p cert-expiration-111228                            | cert-expiration-111228   | jenkins | v1.35.0 | 17 Mar 25 11:23 UTC | 17 Mar 25 11:23 UTC |
	| start   | -p cert-options-469914                               | cert-options-469914      | jenkins | v1.35.0 | 17 Mar 25 11:23 UTC | 17 Mar 25 11:24 UTC |
	|         | --memory=2048                                        |                          |         |         |                     |                     |
	|         | --apiserver-ips=127.0.0.1                            |                          |         |         |                     |                     |
	|         | --apiserver-ips=192.168.15.15                        |                          |         |         |                     |                     |
	|         | --apiserver-names=localhost                          |                          |         |         |                     |                     |
	|         | --apiserver-names=www.google.com                     |                          |         |         |                     |                     |
	|         | --apiserver-port=8555                                |                          |         |         |                     |                     |
	|         | --driver=docker                                      |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	| ssh     | cert-options-469914 ssh                              | cert-options-469914      | jenkins | v1.35.0 | 17 Mar 25 11:24 UTC | 17 Mar 25 11:24 UTC |
	|         | openssl x509 -text -noout -in                        |                          |         |         |                     |                     |
	|         | /var/lib/minikube/certs/apiserver.crt                |                          |         |         |                     |                     |
	| ssh     | -p cert-options-469914 -- sudo                       | cert-options-469914      | jenkins | v1.35.0 | 17 Mar 25 11:24 UTC | 17 Mar 25 11:24 UTC |
	|         | cat /etc/kubernetes/admin.conf                       |                          |         |         |                     |                     |
	| delete  | -p cert-options-469914                               | cert-options-469914      | jenkins | v1.35.0 | 17 Mar 25 11:24 UTC | 17 Mar 25 11:24 UTC |
	| start   | -p old-k8s-version-744018                            | old-k8s-version-744018   | jenkins | v1.35.0 | 17 Mar 25 11:24 UTC |                     |
	|         | --memory=2200                                        |                          |         |         |                     |                     |
	|         | --alsologtostderr --wait=true                        |                          |         |         |                     |                     |
	|         | --kvm-network=default                                |                          |         |         |                     |                     |
	|         | --kvm-qemu-uri=qemu:///system                        |                          |         |         |                     |                     |
	|         | --disable-driver-mounts                              |                          |         |         |                     |                     |
	|         | --keep-context=false                                 |                          |         |         |                     |                     |
	|         | --driver=docker                                      |                          |         |         |                     |                     |
	|         | --container-runtime=containerd                       |                          |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0                         |                          |         |         |                     |                     |
	|---------|------------------------------------------------------|--------------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/03/17 11:24:10
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.24.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0317 11:24:10.989208  211660 out.go:345] Setting OutFile to fd 1 ...
	I0317 11:24:10.989399  211660 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:24:10.989413  211660 out.go:358] Setting ErrFile to fd 2...
	I0317 11:24:10.989419  211660 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:24:10.989675  211660 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 11:24:10.990145  211660 out.go:352] Setting JSON to false
	I0317 11:24:10.991125  211660 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":3996,"bootTime":1742206655,"procs":204,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 11:24:10.991201  211660 start.go:139] virtualization:  
	I0317 11:24:10.995346  211660 out.go:177] * [old-k8s-version-744018] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	I0317 11:24:10.998754  211660 out.go:177]   - MINIKUBE_LOCATION=20535
	I0317 11:24:10.998902  211660 notify.go:220] Checking for updates...
	I0317 11:24:11.005429  211660 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 11:24:11.008492  211660 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 11:24:11.011639  211660 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 11:24:11.014684  211660 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0317 11:24:11.017915  211660 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0317 11:24:11.023886  211660 config.go:182] Loaded profile config "pause-600904": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 11:24:11.024012  211660 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 11:24:11.056576  211660 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 11:24:11.056694  211660 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 11:24:11.122024  211660 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:36 OomKillDisable:true NGoroutines:52 SystemTime:2025-03-17 11:24:11.112050279 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
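The docker system info --format "{{json .}}" probe above is how the start path snapshots daemon capabilities before validating the driver. A minimal sketch of the same pattern in Go; the struct below is a hypothetical subset of the fields visible in the log line, not minikube's actual type:

package main

import (
	"encoding/json"
	"fmt"
	"os/exec"
)

// dockerInfo captures just a few of the fields the log line above carries.
type dockerInfo struct {
	NCPU          int    `json:"NCPU"`
	MemTotal      int64  `json:"MemTotal"`
	CgroupDriver  string `json:"CgroupDriver"`
	ServerVersion string `json:"ServerVersion"`
}

func main() {
	// Same command the log records via cli_runner.
	out, err := exec.Command("docker", "system", "info", "--format", "{{json .}}").Output()
	if err != nil {
		panic(err)
	}
	var info dockerInfo
	if err := json.Unmarshal(out, &info); err != nil {
		panic(err)
	}
	fmt.Printf("cpus=%d mem=%d cgroup=%s version=%s\n",
		info.NCPU, info.MemTotal, info.CgroupDriver, info.ServerVersion)
}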
	I0317 11:24:11.122134  211660 docker.go:318] overlay module found
	I0317 11:24:11.127345  211660 out.go:177] * Using the docker driver based on user configuration
	I0317 11:24:11.130507  211660 start.go:297] selected driver: docker
	I0317 11:24:11.130532  211660 start.go:901] validating driver "docker" against <nil>
	I0317 11:24:11.130549  211660 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0317 11:24:11.131352  211660 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 11:24:11.196925  211660 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:4 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:36 OomKillDisable:true NGoroutines:52 SystemTime:2025-03-17 11:24:11.177768686 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 11:24:11.197098  211660 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0317 11:24:11.197325  211660 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0317 11:24:11.200677  211660 out.go:177] * Using Docker driver with root privileges
	I0317 11:24:11.203686  211660 cni.go:84] Creating CNI manager for ""
	I0317 11:24:11.203754  211660 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0317 11:24:11.203770  211660 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0317 11:24:11.203847  211660 start.go:340] cluster config:
	{Name:old-k8s-version-744018 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-744018 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 11:24:11.208851  211660 out.go:177] * Starting "old-k8s-version-744018" primary control-plane node in "old-k8s-version-744018" cluster
	I0317 11:24:11.211940  211660 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0317 11:24:11.214902  211660 out.go:177] * Pulling base image v0.0.46-1741860993-20523 ...
	I0317 11:24:11.217745  211660 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0317 11:24:11.217802  211660 preload.go:146] Found local preload: /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0317 11:24:11.217815  211660 cache.go:56] Caching tarball of preloaded images
	I0317 11:24:11.217846  211660 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon
	I0317 11:24:11.217903  211660 preload.go:172] Found /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4 in cache, skipping download
	I0317 11:24:11.217914  211660 cache.go:59] Finished verifying existence of preloaded tar for v1.20.0 on containerd
	I0317 11:24:11.218031  211660 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/config.json ...
	I0317 11:24:11.218048  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/config.json: {Name:mkc5a73967558722eefea7a3e49c270cb62850c4 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:11.249474  211660 image.go:100] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon, skipping pull
	I0317 11:24:11.249500  211660 cache.go:145] gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 exists in daemon, skipping load
	I0317 11:24:11.249519  211660 cache.go:230] Successfully downloaded all kic artifacts
	I0317 11:24:11.249556  211660 start.go:360] acquireMachinesLock for old-k8s-version-744018: {Name:mkeecead004b68a29f0952564066496daeae9590 Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0317 11:24:11.249665  211660 start.go:364] duration metric: took 87µs to acquireMachinesLock for "old-k8s-version-744018"
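The lock is acquired in 87µs here because nothing else holds it; the {Delay:500ms Timeout:10m0s} parameters in the line above describe the retry policy under contention. A rough stand-in using an O_EXCL lock file, purely for illustration (minikube's actual lock implementation differs):

package main

import (
	"fmt"
	"os"
	"time"
)

// acquireLock retries creating an exclusive lock file every delay
// until timeout expires, mimicking the Delay/Timeout pair in the log.
func acquireLock(path string, delay, timeout time.Duration) (release func(), err error) {
	deadline := time.Now().Add(timeout)
	for {
		f, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o600)
		if err == nil {
			f.Close()
			return func() { os.Remove(path) }, nil
		}
		if time.Now().After(deadline) {
			return nil, fmt.Errorf("timed out waiting for %s: %w", path, err)
		}
		time.Sleep(delay)
	}
}

func main() {
	// Hypothetical lock path; the real one lives under the minikube home.
	release, err := acquireLock("/tmp/minikube-machines.lock", 500*time.Millisecond, 10*time.Minute)
	if err != nil {
		panic(err)
	}
	defer release()
	fmt.Println("lock held")
}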
	I0317 11:24:11.249694  211660 start.go:93] Provisioning new machine with config: &{Name:old-k8s-version-744018 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-744018 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0317 11:24:11.249760  211660 start.go:125] createHost starting for "" (driver="docker")
	I0317 11:24:11.253315  211660 out.go:235] * Creating docker container (CPUs=2, Memory=2200MB) ...
	I0317 11:24:11.253568  211660 start.go:159] libmachine.API.Create for "old-k8s-version-744018" (driver="docker")
	I0317 11:24:11.253604  211660 client.go:168] LocalClient.Create starting
	I0317 11:24:11.253670  211660 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem
	I0317 11:24:11.253713  211660 main.go:141] libmachine: Decoding PEM data...
	I0317 11:24:11.253730  211660 main.go:141] libmachine: Parsing certificate...
	I0317 11:24:11.253791  211660 main.go:141] libmachine: Reading certificate data from /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem
	I0317 11:24:11.253814  211660 main.go:141] libmachine: Decoding PEM data...
	I0317 11:24:11.253828  211660 main.go:141] libmachine: Parsing certificate...
	I0317 11:24:11.254203  211660 cli_runner.go:164] Run: docker network inspect old-k8s-version-744018 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	W0317 11:24:11.269270  211660 cli_runner.go:211] docker network inspect old-k8s-version-744018 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
	I0317 11:24:11.269355  211660 network_create.go:284] running [docker network inspect old-k8s-version-744018] to gather additional debugging logs...
	I0317 11:24:11.269376  211660 cli_runner.go:164] Run: docker network inspect old-k8s-version-744018
	W0317 11:24:11.284504  211660 cli_runner.go:211] docker network inspect old-k8s-version-744018 returned with exit code 1
	I0317 11:24:11.284534  211660 network_create.go:287] error running [docker network inspect old-k8s-version-744018]: docker network inspect old-k8s-version-744018: exit status 1
	stdout:
	[]
	
	stderr:
	Error response from daemon: network old-k8s-version-744018 not found
	I0317 11:24:11.284547  211660 network_create.go:289] output of [docker network inspect old-k8s-version-744018]: -- stdout --
	[]
	
	-- /stdout --
	** stderr ** 
	Error response from daemon: network old-k8s-version-744018 not found
	
	** /stderr **
	I0317 11:24:11.284640  211660 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 11:24:11.302219  211660 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-e774881651be IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:12:df:c7:61:5e:f1} reservation:<nil>}
	I0317 11:24:11.302541  211660 network.go:211] skipping subnet 192.168.58.0/24 that is taken: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName:br-d6aab97fa8ac IfaceIPv4:192.168.58.1 IfaceMTU:1500 IfaceMAC:d6:0d:b2:7e:8e:d0} reservation:<nil>}
	I0317 11:24:11.302885  211660 network.go:211] skipping subnet 192.168.67.0/24 that is taken: &{IP:192.168.67.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.67.0/24 Gateway:192.168.67.1 ClientMin:192.168.67.2 ClientMax:192.168.67.254 Broadcast:192.168.67.255 IsPrivate:true Interface:{IfaceName:br-afee3b0bb0e9 IfaceIPv4:192.168.67.1 IfaceMTU:1500 IfaceMAC:62:06:e7:12:6f:56} reservation:<nil>}
	I0317 11:24:11.303342  211660 network.go:206] using free private subnet 192.168.76.0/24: &{IP:192.168.76.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.76.0/24 Gateway:192.168.76.1 ClientMin:192.168.76.2 ClientMax:192.168.76.254 Broadcast:192.168.76.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x40019a3180}
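The three "skipping subnet" lines and the final "using free private subnet" line show the allocator walking candidate /24 blocks, with the third octet advancing by 9 (.49, .58, .67, .76), until it finds one not claimed by an existing bridge interface. A simplified sketch of that walk, with the taken set hard-coded rather than discovered from host interfaces:

package main

import "fmt"

// freeSubnet walks 192.168.x.0/24 candidates starting at the given
// third octet, stepping by 9 as the log suggests, and returns the
// first block not present in taken.
func freeSubnet(start, step int, taken map[string]bool) string {
	for octet := start; octet < 256; octet += step {
		cidr := fmt.Sprintf("192.168.%d.0/24", octet)
		if !taken[cidr] {
			return cidr
		}
	}
	return ""
}

func main() {
	// Subnets the log reports as already claimed by docker bridges.
	taken := map[string]bool{
		"192.168.49.0/24": true,
		"192.168.58.0/24": true,
		"192.168.67.0/24": true,
	}
	fmt.Println(freeSubnet(49, 9, taken)) // prints 192.168.76.0/24, matching the log
}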
	I0317 11:24:11.303377  211660 network_create.go:124] attempt to create docker network old-k8s-version-744018 192.168.76.0/24 with gateway 192.168.76.1 and MTU of 1500 ...
	I0317 11:24:11.303433  211660 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.76.0/24 --gateway=192.168.76.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=old-k8s-version-744018 old-k8s-version-744018
	I0317 11:24:11.366102  211660 network_create.go:108] docker network old-k8s-version-744018 192.168.76.0/24 created
	I0317 11:24:11.366134  211660 kic.go:121] calculated static IP "192.168.76.2" for the "old-k8s-version-744018" container
	I0317 11:24:11.366206  211660 cli_runner.go:164] Run: docker ps -a --format {{.Names}}
	I0317 11:24:11.385271  211660 cli_runner.go:164] Run: docker volume create old-k8s-version-744018 --label name.minikube.sigs.k8s.io=old-k8s-version-744018 --label created_by.minikube.sigs.k8s.io=true
	I0317 11:24:11.405350  211660 oci.go:103] Successfully created a docker volume old-k8s-version-744018
	I0317 11:24:11.405454  211660 cli_runner.go:164] Run: docker run --rm --name old-k8s-version-744018-preload-sidecar --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=old-k8s-version-744018 --entrypoint /usr/bin/test -v old-k8s-version-744018:/var gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -d /var/lib
	I0317 11:24:11.936706  211660 oci.go:107] Successfully prepared a docker volume old-k8s-version-744018
	I0317 11:24:11.936761  211660 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0317 11:24:11.936781  211660 kic.go:194] Starting extracting preloaded images to volume ...
	I0317 11:24:11.936867  211660 cli_runner.go:164] Run: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v old-k8s-version-744018:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir
	I0317 11:24:18.521591  211660 cli_runner.go:217] Completed: docker run --rm --entrypoint /usr/bin/tar -v /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4:/preloaded.tar:ro -v old-k8s-version-744018:/extractDir gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 -I lz4 -xf /preloaded.tar -C /extractDir: (6.584678888s)
	I0317 11:24:18.521625  211660 kic.go:203] duration metric: took 6.584840531s to extract preloaded images to volume ...
	W0317 11:24:18.521775  211660 cgroups_linux.go:77] Your kernel does not support swap limit capabilities or the cgroup is not mounted.
	I0317 11:24:18.521883  211660 cli_runner.go:164] Run: docker info --format "'{{json .SecurityOptions}}'"
	I0317 11:24:18.585979  211660 cli_runner.go:164] Run: docker run -d -t --privileged --security-opt seccomp=unconfined --tmpfs /tmp --tmpfs /run -v /lib/modules:/lib/modules:ro --hostname old-k8s-version-744018 --name old-k8s-version-744018 --label created_by.minikube.sigs.k8s.io=true --label name.minikube.sigs.k8s.io=old-k8s-version-744018 --label role.minikube.sigs.k8s.io= --label mode.minikube.sigs.k8s.io=old-k8s-version-744018 --network old-k8s-version-744018 --ip 192.168.76.2 --volume old-k8s-version-744018:/var --security-opt apparmor=unconfined --memory=2200mb --cpus=2 -e container=docker --expose 8443 --publish=127.0.0.1::8443 --publish=127.0.0.1::22 --publish=127.0.0.1::2376 --publish=127.0.0.1::5000 --publish=127.0.0.1::32443 gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185
	I0317 11:24:18.876911  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Running}}
	I0317 11:24:18.905167  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:24:18.925732  211660 cli_runner.go:164] Run: docker exec old-k8s-version-744018 stat /var/lib/dpkg/alternatives/iptables
	I0317 11:24:18.974275  211660 oci.go:144] the created container "old-k8s-version-744018" has a running status.
	I0317 11:24:18.974317  211660 kic.go:225] Creating ssh key for kic: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa...
	I0317 11:24:19.510881  211660 kic_runner.go:191] docker (temp): /home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa.pub --> /home/docker/.ssh/authorized_keys (381 bytes)
	I0317 11:24:19.535879  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:24:19.559380  211660 kic_runner.go:93] Run: chown docker:docker /home/docker/.ssh/authorized_keys
	I0317 11:24:19.559402  211660 kic_runner.go:114] Args: [docker exec --privileged old-k8s-version-744018 chown docker:docker /home/docker/.ssh/authorized_keys]
	I0317 11:24:19.633303  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:24:19.669153  211660 machine.go:93] provisionDockerMachine start ...
	I0317 11:24:19.669268  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:19.708927  211660 main.go:141] libmachine: Using SSH client type: native
	I0317 11:24:19.709302  211660 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 33053 <nil> <nil>}
	I0317 11:24:19.709320  211660 main.go:141] libmachine: About to run SSH command:
	hostname
	I0317 11:24:19.878420  211660 main.go:141] libmachine: SSH cmd err, output: <nil>: old-k8s-version-744018
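"Using SSH client type: native" means the provisioner dials the published port (127.0.0.1:33053, mapped to the container's port 22) with an in-process SSH client instead of shelling out to ssh. A minimal equivalent using golang.org/x/crypto/ssh; the key path and address are taken from the log, and InsecureIgnoreHostKey is an illustration-only shortcut:

package main

import (
	"fmt"
	"os"

	"golang.org/x/crypto/ssh"
)

func main() {
	key, err := os.ReadFile("/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa")
	if err != nil {
		panic(err)
	}
	signer, err := ssh.ParsePrivateKey(key)
	if err != nil {
		panic(err)
	}
	cfg := &ssh.ClientConfig{
		User:            "docker",
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // acceptable for a throwaway test VM only
	}
	client, err := ssh.Dial("tcp", "127.0.0.1:33053", cfg)
	if err != nil {
		panic(err)
	}
	defer client.Close()
	session, err := client.NewSession()
	if err != nil {
		panic(err)
	}
	defer session.Close()
	out, err := session.Output("hostname") // the same first command the log runs
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}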
	
	I0317 11:24:19.878463  211660 ubuntu.go:169] provisioning hostname "old-k8s-version-744018"
	I0317 11:24:19.878534  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:19.900527  211660 main.go:141] libmachine: Using SSH client type: native
	I0317 11:24:19.900845  211660 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 33053 <nil> <nil>}
	I0317 11:24:19.900857  211660 main.go:141] libmachine: About to run SSH command:
	sudo hostname old-k8s-version-744018 && echo "old-k8s-version-744018" | sudo tee /etc/hostname
	I0317 11:24:20.049029  211660 main.go:141] libmachine: SSH cmd err, output: <nil>: old-k8s-version-744018
	
	I0317 11:24:20.049115  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:20.074323  211660 main.go:141] libmachine: Using SSH client type: native
	I0317 11:24:20.074758  211660 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x3e66c0] 0x3e8e80 <nil>  [] 0s} 127.0.0.1 33053 <nil> <nil>}
	I0317 11:24:20.074779  211660 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sold-k8s-version-744018' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 old-k8s-version-744018/g' /etc/hosts;
				else 
					echo '127.0.1.1 old-k8s-version-744018' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0317 11:24:20.206970  211660 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0317 11:24:20.206998  211660 ubuntu.go:175] set auth options {CertDir:/home/jenkins/minikube-integration/20535-2262/.minikube CaCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem CaPrivateKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ServerKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem ClientKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:/home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem ServerCertSANs:[] StorePath:/home/jenkins/minikube-integration/20535-2262/.minikube}
	I0317 11:24:20.207075  211660 ubuntu.go:177] setting up certificates
	I0317 11:24:20.207087  211660 provision.go:84] configureAuth start
	I0317 11:24:20.207167  211660 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-744018
	I0317 11:24:20.233948  211660 provision.go:143] copyHostCerts
	I0317 11:24:20.234025  211660 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem, removing ...
	I0317 11:24:20.234041  211660 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem
	I0317 11:24:20.234124  211660 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/ca.pem (1078 bytes)
	I0317 11:24:20.234232  211660 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem, removing ...
	I0317 11:24:20.234243  211660 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem
	I0317 11:24:20.234274  211660 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/cert.pem (1123 bytes)
	I0317 11:24:20.234351  211660 exec_runner.go:144] found /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem, removing ...
	I0317 11:24:20.234361  211660 exec_runner.go:203] rm: /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem
	I0317 11:24:20.234389  211660 exec_runner.go:151] cp: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem --> /home/jenkins/minikube-integration/20535-2262/.minikube/key.pem (1679 bytes)
	I0317 11:24:20.234452  211660 provision.go:117] generating server cert: /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem ca-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem private-key=/home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem org=jenkins.old-k8s-version-744018 san=[127.0.0.1 192.168.76.2 localhost minikube old-k8s-version-744018]
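The "generating server cert" step above builds an x509 certificate whose SAN list covers every name a client might use to reach the machine: loopback, the static container IP, and the hostnames. A compact sketch of the SAN handling with crypto/x509; unlike the real flow, which signs with the profile CA, this one self-signs to stay short:

package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"math/big"
	"net"
	"os"
	"time"
)

func main() {
	key, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		panic(err)
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject:      pkix.Name{Organization: []string{"jenkins.old-k8s-version-744018"}},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(26280 * time.Hour), // CertExpiration from the cluster config
		// SANs matching the log's san=[...] list.
		IPAddresses: []net.IP{net.ParseIP("127.0.0.1"), net.ParseIP("192.168.76.2")},
		DNSNames:    []string{"localhost", "minikube", "old-k8s-version-744018"},
		KeyUsage:    x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
		ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
	}
	der, err := x509.CreateCertificate(rand.Reader, tmpl, tmpl, &key.PublicKey, key)
	if err != nil {
		panic(err)
	}
	pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: der})
}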
	I0317 11:24:20.998921  211660 provision.go:177] copyRemoteCerts
	I0317 11:24:20.999008  211660 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0317 11:24:20.999077  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:21.016211  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:24:21.107853  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0317 11:24:21.133853  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server.pem --> /etc/docker/server.pem (1233 bytes)
	I0317 11:24:21.158754  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/machines/server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0317 11:24:21.182692  211660 provision.go:87] duration metric: took 975.591586ms to configureAuth
	I0317 11:24:21.182717  211660 ubuntu.go:193] setting minikube options for container-runtime
	I0317 11:24:21.183006  211660 config.go:182] Loaded profile config "old-k8s-version-744018": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.20.0
	I0317 11:24:21.183017  211660 machine.go:96] duration metric: took 1.513840163s to provisionDockerMachine
	I0317 11:24:21.183033  211660 client.go:171] duration metric: took 9.929418047s to LocalClient.Create
	I0317 11:24:21.183054  211660 start.go:167] duration metric: took 9.929481168s to libmachine.API.Create "old-k8s-version-744018"
	I0317 11:24:21.183061  211660 start.go:293] postStartSetup for "old-k8s-version-744018" (driver="docker")
	I0317 11:24:21.183070  211660 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0317 11:24:21.183118  211660 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0317 11:24:21.183160  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:21.202809  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:24:21.296055  211660 ssh_runner.go:195] Run: cat /etc/os-release
	I0317 11:24:21.299042  211660 main.go:141] libmachine: Couldn't set key VERSION_CODENAME, no corresponding struct field found
	I0317 11:24:21.299089  211660 main.go:141] libmachine: Couldn't set key PRIVACY_POLICY_URL, no corresponding struct field found
	I0317 11:24:21.299099  211660 main.go:141] libmachine: Couldn't set key UBUNTU_CODENAME, no corresponding struct field found
	I0317 11:24:21.299106  211660 info.go:137] Remote host: Ubuntu 22.04.5 LTS
	I0317 11:24:21.299117  211660 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/addons for local assets ...
	I0317 11:24:21.299181  211660 filesync.go:126] Scanning /home/jenkins/minikube-integration/20535-2262/.minikube/files for local assets ...
	I0317 11:24:21.299282  211660 filesync.go:149] local asset: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem -> 75722.pem in /etc/ssl/certs
	I0317 11:24:21.299401  211660 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0317 11:24:21.307854  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /etc/ssl/certs/75722.pem (1708 bytes)
	I0317 11:24:21.331347  211660 start.go:296] duration metric: took 148.271427ms for postStartSetup
	I0317 11:24:21.331715  211660 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-744018
	I0317 11:24:21.348788  211660 profile.go:143] Saving config to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/config.json ...
	I0317 11:24:21.349062  211660 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 11:24:21.349101  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:21.366677  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:24:21.451458  211660 ssh_runner.go:195] Run: sh -c "df -BG /var | awk 'NR==2{print $4}'"
	I0317 11:24:21.455804  211660 start.go:128] duration metric: took 10.206030054s to createHost
	I0317 11:24:21.455828  211660 start.go:83] releasing machines lock for "old-k8s-version-744018", held for 10.206151449s
	I0317 11:24:21.455895  211660 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" old-k8s-version-744018
	I0317 11:24:21.473985  211660 ssh_runner.go:195] Run: cat /version.json
	I0317 11:24:21.474042  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:21.474301  211660 ssh_runner.go:195] Run: curl -sS -m 2 https://registry.k8s.io/
	I0317 11:24:21.474361  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:24:21.501280  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:24:21.511506  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:24:21.594422  211660 ssh_runner.go:195] Run: systemctl --version
	I0317 11:24:21.726694  211660 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0317 11:24:21.730980  211660 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f -name *loopback.conf* -not -name *.mk_disabled -exec sh -c "grep -q loopback {} && ( grep -q name {} || sudo sed -i '/"type": "loopback"/i \ \ \ \ "name": "loopback",' {} ) && sudo sed -i 's|"cniVersion": ".*"|"cniVersion": "1.0.0"|g' {}" ;
	I0317 11:24:21.758250  211660 cni.go:230] loopback cni configuration patched: "/etc/cni/net.d/*loopback.conf*" found
	I0317 11:24:21.758352  211660 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0317 11:24:21.789378  211660 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist, /etc/cni/net.d/100-crio-bridge.conf] bridge cni config(s)
	I0317 11:24:21.789451  211660 start.go:495] detecting cgroup driver to use...
	I0317 11:24:21.789498  211660 detect.go:187] detected "cgroupfs" cgroup driver on host os
	I0317 11:24:21.789563  211660 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0317 11:24:21.801742  211660 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0317 11:24:21.813212  211660 docker.go:217] disabling cri-docker service (if available) ...
	I0317 11:24:21.813322  211660 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.socket
	I0317 11:24:21.827723  211660 ssh_runner.go:195] Run: sudo systemctl stop -f cri-docker.service
	I0317 11:24:21.842382  211660 ssh_runner.go:195] Run: sudo systemctl disable cri-docker.socket
	I0317 11:24:21.942975  211660 ssh_runner.go:195] Run: sudo systemctl mask cri-docker.service
	I0317 11:24:22.034054  211660 docker.go:233] disabling docker service ...
	I0317 11:24:22.034175  211660 ssh_runner.go:195] Run: sudo systemctl stop -f docker.socket
	I0317 11:24:22.056872  211660 ssh_runner.go:195] Run: sudo systemctl stop -f docker.service
	I0317 11:24:22.069598  211660 ssh_runner.go:195] Run: sudo systemctl disable docker.socket
	I0317 11:24:22.167823  211660 ssh_runner.go:195] Run: sudo systemctl mask docker.service
	I0317 11:24:22.269307  211660 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service docker
	I0317 11:24:22.281563  211660 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0317 11:24:22.301486  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.2"|' /etc/containerd/config.toml"
	I0317 11:24:22.312397  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0317 11:24:22.322221  211660 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0317 11:24:22.322288  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0317 11:24:22.332164  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 11:24:22.341875  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0317 11:24:22.351342  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0317 11:24:22.361003  211660 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0317 11:24:22.371678  211660 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
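The run of sed invocations above patches /etc/containerd/config.toml in place: the pause image, SystemdCgroup = false to match the host's cgroupfs driver, the runc v2 shim, and the CNI conf dir. The cgroup-driver substitution expressed in Go, applied to an in-memory sample instead of the real file:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	config := `[plugins."io.containerd.grpc.v1.cri".containerd.runtimes.runc.options]
  SystemdCgroup = true`
	// Mirrors: sed -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g'
	re := regexp.MustCompile(`(?m)^( *)SystemdCgroup = .*$`)
	fmt.Println(re.ReplaceAllString(config, "${1}SystemdCgroup = false"))
}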
	I0317 11:24:22.382936  211660 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0317 11:24:22.391732  211660 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0317 11:24:22.401505  211660 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 11:24:22.490150  211660 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0317 11:24:22.613485  211660 start.go:542] Will wait 60s for socket path /run/containerd/containerd.sock
	I0317 11:24:22.613614  211660 ssh_runner.go:195] Run: stat /run/containerd/containerd.sock
	I0317 11:24:22.617309  211660 start.go:563] Will wait 60s for crictl version
	I0317 11:24:22.617420  211660 ssh_runner.go:195] Run: which crictl
	I0317 11:24:22.621018  211660 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0317 11:24:22.662217  211660 start.go:579] Version:  0.1.0
	RuntimeName:  containerd
	RuntimeVersion:  1.7.25
	RuntimeApiVersion:  v1
	I0317 11:24:22.662318  211660 ssh_runner.go:195] Run: containerd --version
	I0317 11:24:22.685709  211660 ssh_runner.go:195] Run: containerd --version
	I0317 11:24:22.714296  211660 out.go:177] * Preparing Kubernetes v1.20.0 on containerd 1.7.25 ...
	I0317 11:24:24.306664  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:24:24.306683  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:24:24.306688  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:24:24.306695  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:24:24.306699  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:24:24.306702  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:24:24.306707  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:24:24.306709  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:24:24.306722  191740 retry.go:31] will retry after 19.902407887s: missing components: kube-dns
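The interleaved 191740 lines belong to the concurrent pause-600904 start, whose readiness check polls kube-system until every expected component is Running and retries while kube-dns is missing. A bare-bones version of that poll with client-go; the kubeconfig path and the k8s-app=kube-dns label selector are assumptions for illustration:

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/jenkins/minikube-integration/20535-2262/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	for {
		pods, err := cs.CoreV1().Pods("kube-system").List(context.TODO(),
			metav1.ListOptions{LabelSelector: "k8s-app=kube-dns"})
		if err != nil {
			panic(err)
		}
		running := 0
		for _, p := range pods.Items {
			if p.Status.Phase == corev1.PodRunning {
				running++
			}
		}
		if running > 0 {
			fmt.Println("kube-dns is up")
			return
		}
		fmt.Println("missing components: kube-dns; retrying")
		time.Sleep(20 * time.Second) // the log retries after roughly 19.9s
	}
}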
	I0317 11:24:22.717426  211660 cli_runner.go:164] Run: docker network inspect old-k8s-version-744018 --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
	I0317 11:24:22.733263  211660 ssh_runner.go:195] Run: grep 192.168.76.1	host.minikube.internal$ /etc/hosts
	I0317 11:24:22.737122  211660 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "192.168.76.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0317 11:24:22.749805  211660 kubeadm.go:883] updating cluster {Name:old-k8s-version-744018 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-744018 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0317 11:24:22.749930  211660 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0317 11:24:22.749998  211660 ssh_runner.go:195] Run: sudo crictl images --output json
	I0317 11:24:22.790520  211660 containerd.go:627] all images are preloaded for containerd runtime.
	I0317 11:24:22.790548  211660 containerd.go:534] Images already preloaded, skipping extraction
	I0317 11:24:22.790609  211660 ssh_runner.go:195] Run: sudo crictl images --output json
	I0317 11:24:22.830308  211660 containerd.go:627] all images are preloaded for containerd runtime.
	I0317 11:24:22.830331  211660 cache_images.go:84] Images are preloaded, skipping loading
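
The preload check above is an inventory pass: list what containerd already holds and skip the tarball extraction if everything needed is present. A sketch of consuming crictl images --output json from Go; the JSON field names mirror crictl's output but should be treated as assumptions for other crictl versions, and the sample tag is only illustrative.

	package main

	import (
		"encoding/json"
		"fmt"
		"os/exec"
	)

	type imageList struct {
		Images []struct {
			RepoTags []string `json:"repoTags"`
		} `json:"images"`
	}

	func main() {
		out, err := exec.Command("sudo", "crictl", "images", "--output", "json").Output()
		if err != nil {
			panic(err)
		}
		var list imageList
		if err := json.Unmarshal(out, &list); err != nil {
			panic(err)
		}
		have := map[string]bool{}
		for _, img := range list.Images {
			for _, tag := range img.RepoTags {
				have[tag] = true
			}
		}
		fmt.Println("kube-apiserver preloaded:", have["k8s.gcr.io/kube-apiserver:v1.20.0"])
	}
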
	I0317 11:24:22.830340  211660 kubeadm.go:934] updating node { 192.168.76.2 8443 v1.20.0 containerd true true} ...
	I0317 11:24:22.830434  211660 kubeadm.go:946] kubelet [Unit]
	Wants=containerd.service
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.20.0/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --container-runtime=remote --container-runtime-endpoint=unix:///run/containerd/containerd.sock --hostname-override=old-k8s-version-744018 --kubeconfig=/etc/kubernetes/kubelet.conf --network-plugin=cni --node-ip=192.168.76.2
	
	[Install]
	 config:
	{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-744018 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0317 11:24:22.830506  211660 ssh_runner.go:195] Run: sudo crictl info
	I0317 11:24:22.875323  211660 cni.go:84] Creating CNI manager for ""
	I0317 11:24:22.875347  211660 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0317 11:24:22.875358  211660 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0317 11:24:22.875377  211660 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:192.168.76.2 APIServerPort:8443 KubernetesVersion:v1.20.0 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:old-k8s-version-744018 NodeName:old-k8s-version-744018 DNSDomain:cluster.local CRISocket:/run/containerd/containerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "192.168.76.2"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:192.168.76.2 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:false}
	I0317 11:24:22.875498  211660 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 192.168.76.2
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: /run/containerd/containerd.sock
	  name: "old-k8s-version-744018"
	  kubeletExtraArgs:
	    node-ip: 192.168.76.2
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta2
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "192.168.76.2"]
	  extraArgs:
	    enable-admission-plugins: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    allocate-node-cidrs: "true"
	    leader-elect: "false"
	scheduler:
	  extraArgs:
	    leader-elect: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	dns:
	  type: CoreDNS
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      proxy-refresh-interval: "70000"
	kubernetesVersion: v1.20.0
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
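
The config rendered above is four YAML documents in one file: InitConfiguration, ClusterConfiguration, KubeletConfiguration, and KubeProxyConfiguration (it is written to /var/tmp/minikube/kubeadm.yaml.new a few lines below and promoted to kubeadm.yaml before init runs). A quick sanity check is to walk the documents with a multi-document decoder; this sketch uses the third-party gopkg.in/yaml.v3 module.

	package main

	import (
		"fmt"
		"io"
		"os"

		"gopkg.in/yaml.v3"
	)

	func main() {
		f, err := os.Open("/var/tmp/minikube/kubeadm.yaml.new")
		if err != nil {
			panic(err)
		}
		defer f.Close()
		dec := yaml.NewDecoder(f)
		for {
			var doc map[string]interface{}
			if err := dec.Decode(&doc); err == io.EOF {
				break
			} else if err != nil {
				panic(err)
			}
			// Expect kubeadm.k8s.io, kubelet.config.k8s.io and kubeproxy.config.k8s.io documents.
			fmt.Printf("%v %v\n", doc["apiVersion"], doc["kind"])
		}
	}
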
	
	I0317 11:24:22.875567  211660 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.20.0
	I0317 11:24:22.885220  211660 binaries.go:44] Found k8s binaries, skipping transfer
	I0317 11:24:22.885304  211660 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0317 11:24:22.894421  211660 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (442 bytes)
	I0317 11:24:22.913867  211660 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0317 11:24:22.933803  211660 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2125 bytes)
	I0317 11:24:22.953289  211660 ssh_runner.go:195] Run: grep 192.168.76.2	control-plane.minikube.internal$ /etc/hosts
	I0317 11:24:22.956646  211660 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "192.168.76.2	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0317 11:24:22.967731  211660 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 11:24:23.070159  211660 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0317 11:24:23.091436  211660 certs.go:68] Setting up /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018 for IP: 192.168.76.2
	I0317 11:24:23.091513  211660 certs.go:194] generating shared ca certs ...
	I0317 11:24:23.091557  211660 certs.go:226] acquiring lock for ca certs: {Name:mk5a5307154bd473cdb748bc6e62d2139b42123a Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:23.091747  211660 certs.go:235] skipping valid "minikubeCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key
	I0317 11:24:23.091824  211660 certs.go:235] skipping valid "proxyClientCA" ca cert: /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key
	I0317 11:24:23.091847  211660 certs.go:256] generating profile certs ...
	I0317 11:24:23.091932  211660 certs.go:363] generating signed profile cert for "minikube-user": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.key
	I0317 11:24:23.091972  211660 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.crt with IP's: []
	I0317 11:24:23.943700  211660 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.crt ...
	I0317 11:24:23.943738  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.crt: {Name:mkb5eab01352c75a93ad56bbfcaba91705a48f60 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:23.943931  211660 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.key ...
	I0317 11:24:23.943947  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.key: {Name:mkcec6684963c6838d076c1e09e674b5b43813d5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:23.944524  211660 certs.go:363] generating signed profile cert for "minikube": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key.64b72305
	I0317 11:24:23.944548  211660 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt.64b72305 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 192.168.76.2]
	I0317 11:24:24.183314  211660 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt.64b72305 ...
	I0317 11:24:24.183343  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt.64b72305: {Name:mk14a6f24b7aa864b3aed47a87598b87308a69bb Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:24.184049  211660 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key.64b72305 ...
	I0317 11:24:24.184112  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key.64b72305: {Name:mk696e6e400e7e824221ad571c05a42830abbe5b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:24.184674  211660 certs.go:381] copying /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt.64b72305 -> /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt
	I0317 11:24:24.184814  211660 certs.go:385] copying /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key.64b72305 -> /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key
	I0317 11:24:24.184901  211660 certs.go:363] generating signed profile cert for "aggregator": /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.key
	I0317 11:24:24.184950  211660 crypto.go:68] Generating cert /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.crt with IP's: []
	I0317 11:24:24.516302  211660 crypto.go:156] Writing cert to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.crt ...
	I0317 11:24:24.516335  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.crt: {Name:mk8a3ea47ebf285a5a3d70826505555496622131 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:24:24.516521  211660 crypto.go:164] Writing key to /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.key ...
	I0317 11:24:24.516538  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.key: {Name:mk20e951eeebc01a947afb5331588144a7f3e70f Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
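
The certs.go/crypto.go sequence above issues per-profile certificates signed by the shared minikubeCA. A compressed sketch of that flow with Go's standard crypto/x509; the subjects, key sizes, and lifetimes are illustrative rather than minikube's exact values, and the CA is generated in place instead of loaded from the cached ca.key.

	package main

	import (
		"crypto/rand"
		"crypto/rsa"
		"crypto/x509"
		"crypto/x509/pkix"
		"encoding/pem"
		"math/big"
		"os"
		"time"
	)

	func check(err error) {
		if err != nil {
			panic(err)
		}
	}

	func main() {
		// Stand-in for the cached minikubeCA key pair.
		caKey, err := rsa.GenerateKey(rand.Reader, 2048)
		check(err)
		ca := &x509.Certificate{
			SerialNumber:          big.NewInt(1),
			Subject:               pkix.Name{CommonName: "minikubeCA"},
			NotBefore:             time.Now(),
			NotAfter:              time.Now().AddDate(3, 0, 0),
			IsCA:                  true,
			KeyUsage:              x509.KeyUsageCertSign,
			BasicConstraintsValid: true,
		}
		caDER, err := x509.CreateCertificate(rand.Reader, ca, ca, &caKey.PublicKey, caKey)
		check(err)
		caCert, err := x509.ParseCertificate(caDER)
		check(err)

		// Client ("profile") certificate signed by that CA, as for client.crt above.
		cliKey, err := rsa.GenerateKey(rand.Reader, 2048)
		check(err)
		cli := &x509.Certificate{
			SerialNumber: big.NewInt(2),
			Subject:      pkix.Name{CommonName: "minikube-user"},
			NotBefore:    time.Now(),
			NotAfter:     time.Now().AddDate(3, 0, 0),
			KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,
			ExtKeyUsage:  []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth},
		}
		cliDER, err := x509.CreateCertificate(rand.Reader, cli, caCert, &cliKey.PublicKey, caKey)
		check(err)
		check(pem.Encode(os.Stdout, &pem.Block{Type: "CERTIFICATE", Bytes: cliDER}))
	}
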
	I0317 11:24:24.516733  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem (1338 bytes)
	W0317 11:24:24.516780  211660 certs.go:480] ignoring /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572_empty.pem, impossibly tiny 0 bytes
	I0317 11:24:24.516794  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca-key.pem (1675 bytes)
	I0317 11:24:24.516821  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/ca.pem (1078 bytes)
	I0317 11:24:24.516852  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/cert.pem (1123 bytes)
	I0317 11:24:24.516886  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/certs/key.pem (1679 bytes)
	I0317 11:24:24.516934  211660 certs.go:484] found cert: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem (1708 bytes)
	I0317 11:24:24.517501  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0317 11:24:24.543435  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0317 11:24:24.568655  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0317 11:24:24.594017  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1675 bytes)
	I0317 11:24:24.618495  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1432 bytes)
	I0317 11:24:24.643224  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/apiserver.key --> /var/lib/minikube/certs/apiserver.key (1675 bytes)
	I0317 11:24:24.667693  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0317 11:24:24.695436  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0317 11:24:24.722098  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/ssl/certs/75722.pem --> /usr/share/ca-certificates/75722.pem (1708 bytes)
	I0317 11:24:24.748233  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0317 11:24:24.774123  211660 ssh_runner.go:362] scp /home/jenkins/minikube-integration/20535-2262/.minikube/certs/7572.pem --> /usr/share/ca-certificates/7572.pem (1338 bytes)
	I0317 11:24:24.799143  211660 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0317 11:24:24.825161  211660 ssh_runner.go:195] Run: openssl version
	I0317 11:24:24.830749  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/75722.pem && ln -fs /usr/share/ca-certificates/75722.pem /etc/ssl/certs/75722.pem"
	I0317 11:24:24.845495  211660 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/75722.pem
	I0317 11:24:24.849440  211660 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Mar 17 10:33 /usr/share/ca-certificates/75722.pem
	I0317 11:24:24.849523  211660 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/75722.pem
	I0317 11:24:24.856818  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/75722.pem /etc/ssl/certs/3ec20f2e.0"
	I0317 11:24:24.868148  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0317 11:24:24.879049  211660 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0317 11:24:24.883167  211660 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Mar 17 10:26 /usr/share/ca-certificates/minikubeCA.pem
	I0317 11:24:24.883270  211660 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0317 11:24:24.890610  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0317 11:24:24.901145  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7572.pem && ln -fs /usr/share/ca-certificates/7572.pem /etc/ssl/certs/7572.pem"
	I0317 11:24:24.913040  211660 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7572.pem
	I0317 11:24:24.917181  211660 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Mar 17 10:33 /usr/share/ca-certificates/7572.pem
	I0317 11:24:24.917251  211660 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7572.pem
	I0317 11:24:24.924732  211660 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7572.pem /etc/ssl/certs/51391683.0"
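
The ls / openssl x509 -hash / ln -fs triplets above implement OpenSSL's CA lookup convention: every trusted PEM under /etc/ssl/certs gets a symlink named <subject-hash>.0 so TLS clients can find it by subject hash. A sketch that reuses the openssl binary for the hash; the input path is one of the certs from this log, and writing into /etc/ssl/certs requires root.

	package main

	import (
		"os"
		"os/exec"
		"path/filepath"
		"strings"
	)

	// linkBySubjectHash mirrors `ln -fs <pem> /etc/ssl/certs/<hash>.0`.
	func linkBySubjectHash(pemPath string) error {
		out, err := exec.Command("openssl", "x509", "-hash", "-noout", "-in", pemPath).Output()
		if err != nil {
			return err
		}
		link := filepath.Join("/etc/ssl/certs", strings.TrimSpace(string(out))+".0")
		_ = os.Remove(link) // replace a stale link, as ln -f would
		return os.Symlink(pemPath, link)
	}

	func main() {
		if err := linkBySubjectHash("/usr/share/ca-certificates/minikubeCA.pem"); err != nil {
			panic(err)
		}
	}
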
	I0317 11:24:24.936992  211660 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0317 11:24:24.942684  211660 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0317 11:24:24.942758  211660 kubeadm.go:392] StartCluster: {Name:old-k8s-version-744018 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:true NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:old-k8s-version-744018 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 11:24:24.942845  211660 cri.go:54] listing CRI containers in root /run/containerd/runc/k8s.io: {State:paused Name: Namespaces:[kube-system]}
	I0317 11:24:24.942927  211660 ssh_runner.go:195] Run: sudo -s eval "crictl ps -a --quiet --label io.kubernetes.pod.namespace=kube-system"
	I0317 11:24:24.979888  211660 cri.go:89] found id: ""
	I0317 11:24:24.979970  211660 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0317 11:24:24.988942  211660 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0317 11:24:24.997807  211660 kubeadm.go:214] ignoring SystemVerification for kubeadm because of docker driver
	I0317 11:24:24.997881  211660 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0317 11:24:25.006963  211660 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0317 11:24:25.007041  211660 kubeadm.go:157] found existing configuration files:
	
	I0317 11:24:25.007124  211660 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0317 11:24:25.016011  211660 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0317 11:24:25.016096  211660 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0317 11:24:25.026345  211660 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0317 11:24:25.035817  211660 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0317 11:24:25.035929  211660 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0317 11:24:25.045618  211660 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0317 11:24:25.054977  211660 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0317 11:24:25.055050  211660 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0317 11:24:25.063773  211660 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0317 11:24:25.072843  211660 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0317 11:24:25.072911  211660 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
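
The four grep / rm pairs above reduce to one rule: a kubeconfig under /etc/kubernetes survives only if it already points at https://control-plane.minikube.internal:8443; anything else is deleted so the upcoming kubeadm init rewrites it. The same check as a Go sketch:

	package main

	import (
		"os"
		"strings"
	)

	func main() {
		const wantServer = "https://control-plane.minikube.internal:8443"
		for _, name := range []string{"admin.conf", "kubelet.conf", "controller-manager.conf", "scheduler.conf"} {
			path := "/etc/kubernetes/" + name
			data, err := os.ReadFile(path)
			if err != nil || !strings.Contains(string(data), wantServer) {
				// Missing or pointing at the wrong endpoint: remove it.
				os.Remove(path)
			}
		}
	}
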
	I0317 11:24:25.081493  211660 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.20.0:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem,SystemVerification,FileContent--proc-sys-net-bridge-bridge-nf-call-iptables"
	I0317 11:24:25.202901  211660 kubeadm.go:310] 	[WARNING SystemVerification]: failed to parse kernel config: unable to load kernel module: "configs", output: "modprobe: FATAL: Module configs not found in directory /lib/modules/5.15.0-1077-aws\n", err: exit status 1
	I0317 11:24:25.309137  211660 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0317 11:24:44.213530  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:24:44.213548  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:24:44.213553  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:24:44.213560  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:24:44.213564  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:24:44.213568  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:24:44.213571  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:24:44.213574  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:24:44.213585  191740 retry.go:31] will retry after 21.577953545s: missing components: kube-dns
	I0317 11:24:55.666456  211660 kubeadm.go:310] [init] Using Kubernetes version: v1.20.0
	I0317 11:24:55.666537  211660 kubeadm.go:310] [preflight] Running pre-flight checks
	I0317 11:24:55.666644  211660 kubeadm.go:310] [preflight] The system verification failed. Printing the output from the verification:
	I0317 11:24:55.666731  211660 kubeadm.go:310] KERNEL_VERSION: 5.15.0-1077-aws
	I0317 11:24:55.666774  211660 kubeadm.go:310] OS: Linux
	I0317 11:24:55.666825  211660 kubeadm.go:310] CGROUPS_CPU: enabled
	I0317 11:24:55.666942  211660 kubeadm.go:310] CGROUPS_CPUACCT: enabled
	I0317 11:24:55.667010  211660 kubeadm.go:310] CGROUPS_CPUSET: enabled
	I0317 11:24:55.667065  211660 kubeadm.go:310] CGROUPS_DEVICES: enabled
	I0317 11:24:55.667112  211660 kubeadm.go:310] CGROUPS_FREEZER: enabled
	I0317 11:24:55.667187  211660 kubeadm.go:310] CGROUPS_MEMORY: enabled
	I0317 11:24:55.667244  211660 kubeadm.go:310] CGROUPS_PIDS: enabled
	I0317 11:24:55.667292  211660 kubeadm.go:310] CGROUPS_HUGETLB: enabled
	I0317 11:24:55.667364  211660 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0317 11:24:55.667460  211660 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0317 11:24:55.667552  211660 kubeadm.go:310] [preflight] You can also perform this action in beforehand using 'kubeadm config images pull'
	I0317 11:24:55.667619  211660 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0317 11:24:55.670729  211660 out.go:235]   - Generating certificates and keys ...
	I0317 11:24:55.670815  211660 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0317 11:24:55.670950  211660 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0317 11:24:55.671019  211660 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0317 11:24:55.671082  211660 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0317 11:24:55.671145  211660 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0317 11:24:55.671199  211660 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0317 11:24:55.671258  211660 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0317 11:24:55.671385  211660 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost old-k8s-version-744018] and IPs [192.168.76.2 127.0.0.1 ::1]
	I0317 11:24:55.671442  211660 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0317 11:24:55.671567  211660 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost old-k8s-version-744018] and IPs [192.168.76.2 127.0.0.1 ::1]
	I0317 11:24:55.671642  211660 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0317 11:24:55.671710  211660 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0317 11:24:55.671758  211660 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0317 11:24:55.671818  211660 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0317 11:24:55.671872  211660 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0317 11:24:55.671928  211660 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0317 11:24:55.671994  211660 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0317 11:24:55.672054  211660 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0317 11:24:55.672158  211660 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0317 11:24:55.672245  211660 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0317 11:24:55.672287  211660 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0317 11:24:55.672364  211660 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0317 11:24:55.675249  211660 out.go:235]   - Booting up control plane ...
	I0317 11:24:55.675411  211660 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0317 11:24:55.675538  211660 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0317 11:24:55.675617  211660 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0317 11:24:55.675707  211660 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0317 11:24:55.675870  211660 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests". This can take up to 4m0s
	I0317 11:24:55.675957  211660 kubeadm.go:310] [apiclient] All control plane components are healthy after 19.002887 seconds
	I0317 11:24:55.676075  211660 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0317 11:24:55.676210  211660 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config-1.20" in namespace kube-system with the configuration for the kubelets in the cluster
	I0317 11:24:55.676281  211660 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0317 11:24:55.676483  211660 kubeadm.go:310] [mark-control-plane] Marking the node old-k8s-version-744018 as control-plane by adding the labels "node-role.kubernetes.io/master=''" and "node-role.kubernetes.io/control-plane='' (deprecated)"
	I0317 11:24:55.676551  211660 kubeadm.go:310] [bootstrap-token] Using token: s80v1i.yhw5xybndfdim00k
	I0317 11:24:55.681559  211660 out.go:235]   - Configuring RBAC rules ...
	I0317 11:24:55.681686  211660 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0317 11:24:55.681776  211660 kubeadm.go:310] [bootstrap-token] configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0317 11:24:55.681920  211660 kubeadm.go:310] [bootstrap-token] configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0317 11:24:55.682054  211660 kubeadm.go:310] [bootstrap-token] configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0317 11:24:55.682179  211660 kubeadm.go:310] [bootstrap-token] configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0317 11:24:55.682280  211660 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0317 11:24:55.682399  211660 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0317 11:24:55.682446  211660 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0317 11:24:55.682495  211660 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0317 11:24:55.682503  211660 kubeadm.go:310] 
	I0317 11:24:55.682563  211660 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0317 11:24:55.682570  211660 kubeadm.go:310] 
	I0317 11:24:55.682646  211660 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0317 11:24:55.682654  211660 kubeadm.go:310] 
	I0317 11:24:55.682679  211660 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0317 11:24:55.682741  211660 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0317 11:24:55.682794  211660 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0317 11:24:55.682802  211660 kubeadm.go:310] 
	I0317 11:24:55.682856  211660 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0317 11:24:55.682983  211660 kubeadm.go:310] 
	I0317 11:24:55.683067  211660 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0317 11:24:55.683079  211660 kubeadm.go:310] 
	I0317 11:24:55.683150  211660 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0317 11:24:55.683261  211660 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0317 11:24:55.683369  211660 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0317 11:24:55.683381  211660 kubeadm.go:310] 
	I0317 11:24:55.683490  211660 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0317 11:24:55.683607  211660 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0317 11:24:55.683618  211660 kubeadm.go:310] 
	I0317 11:24:55.683727  211660 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token s80v1i.yhw5xybndfdim00k \
	I0317 11:24:55.683872  211660 kubeadm.go:310]     --discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 \
	I0317 11:24:55.683924  211660 kubeadm.go:310]     --control-plane 
	I0317 11:24:55.683935  211660 kubeadm.go:310] 
	I0317 11:24:55.684043  211660 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0317 11:24:55.684056  211660 kubeadm.go:310] 
	I0317 11:24:55.684159  211660 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token s80v1i.yhw5xybndfdim00k \
	I0317 11:24:55.684320  211660 kubeadm.go:310]     --discovery-token-ca-cert-hash sha256:82372d7d6f2d0ece0140839ab0643a36f9b5c212eeca6b196867deda9818a516 
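
The sha256:… value in the join commands above is not a digest of the whole CA certificate: kubeadm pins the SHA-256 of the CA's DER-encoded Subject Public Key Info. It can be recomputed from the node's ca.crt:

	package main

	import (
		"crypto/sha256"
		"crypto/x509"
		"encoding/pem"
		"fmt"
		"os"
	)

	func main() {
		data, err := os.ReadFile("/var/lib/minikube/certs/ca.crt")
		if err != nil {
			panic(err)
		}
		block, _ := pem.Decode(data)
		if block == nil {
			panic("no PEM block in ca.crt")
		}
		cert, err := x509.ParseCertificate(block.Bytes)
		if err != nil {
			panic(err)
		}
		sum := sha256.Sum256(cert.RawSubjectPublicKeyInfo)
		fmt.Printf("--discovery-token-ca-cert-hash sha256:%x\n", sum[:])
	}
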
	I0317 11:24:55.684366  211660 cni.go:84] Creating CNI manager for ""
	I0317 11:24:55.684380  211660 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0317 11:24:55.689214  211660 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0317 11:24:55.692144  211660 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0317 11:24:55.697276  211660 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.20.0/kubectl ...
	I0317 11:24:55.697297  211660 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0317 11:24:55.729358  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0317 11:24:56.360526  211660 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0317 11:24:56.360643  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:56.360725  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes old-k8s-version-744018 minikube.k8s.io/updated_at=2025_03_17T11_24_56_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76 minikube.k8s.io/name=old-k8s-version-744018 minikube.k8s.io/primary=true
	I0317 11:24:56.530456  211660 ops.go:34] apiserver oom_adj: -16
	I0317 11:24:56.530570  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:57.030705  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:57.531309  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:58.030691  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:58.531115  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:59.031436  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:24:59.531086  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:00.035168  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:00.531439  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:01.031662  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:01.530940  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:02.032619  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:02.530992  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:03.030645  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:03.531382  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:04.030672  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:04.531175  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:05.031023  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:05.531316  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:05.794791  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:25:05.794811  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:25:05.794816  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:25:05.794825  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:25:05.794829  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:25:05.794833  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:25:05.794836  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:25:05.794839  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:25:05.794852  191740 retry.go:31] will retry after 30.601920954s: missing components: kube-dns
	I0317 11:25:06.031645  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:06.531333  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:07.030767  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:07.531474  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:08.030662  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:08.530704  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:09.030727  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:09.531529  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:10.031042  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:10.531345  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:11.031053  211660 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.20.0/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0317 11:25:11.283772  211660 kubeadm.go:1113] duration metric: took 14.923189335s to wait for elevateKubeSystemPrivileges
	I0317 11:25:11.283804  211660 kubeadm.go:394] duration metric: took 46.341052137s to StartCluster
	I0317 11:25:11.283822  211660 settings.go:142] acquiring lock: {Name:mk05e4f82496d9c1bce10f4ad315347825261fba Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:25:11.283880  211660 settings.go:150] Updating kubeconfig:  /home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 11:25:11.284839  211660 lock.go:35] WriteFile acquiring /home/jenkins/minikube-integration/20535-2262/kubeconfig: {Name:mk08eb5822f827f6c2a387a47497144ae27dff3b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0317 11:25:11.285059  211660 start.go:235] Will wait 6m0s for node &{Name: IP:192.168.76.2 Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}
	I0317 11:25:11.285163  211660 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0317 11:25:11.285406  211660 config.go:182] Loaded profile config "old-k8s-version-744018": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.20.0
	I0317 11:25:11.285513  211660 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0317 11:25:11.285580  211660 addons.go:69] Setting storage-provisioner=true in profile "old-k8s-version-744018"
	I0317 11:25:11.285596  211660 addons.go:238] Setting addon storage-provisioner=true in "old-k8s-version-744018"
	I0317 11:25:11.285621  211660 host.go:66] Checking if "old-k8s-version-744018" exists ...
	I0317 11:25:11.286177  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:25:11.286653  211660 addons.go:69] Setting default-storageclass=true in profile "old-k8s-version-744018"
	I0317 11:25:11.286674  211660 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "old-k8s-version-744018"
	I0317 11:25:11.287002  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:25:11.288635  211660 out.go:177] * Verifying Kubernetes components...
	I0317 11:25:11.291534  211660 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0317 11:25:11.326344  211660 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0317 11:25:11.329953  211660 addons.go:238] Setting addon default-storageclass=true in "old-k8s-version-744018"
	I0317 11:25:11.329993  211660 host.go:66] Checking if "old-k8s-version-744018" exists ...
	I0317 11:25:11.330409  211660 cli_runner.go:164] Run: docker container inspect old-k8s-version-744018 --format={{.State.Status}}
	I0317 11:25:11.332577  211660 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0317 11:25:11.332598  211660 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0317 11:25:11.332653  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:25:11.363064  211660 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0317 11:25:11.363085  211660 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0317 11:25:11.363149  211660 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" old-k8s-version-744018
	I0317 11:25:11.375526  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:25:11.401509  211660 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:33053 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/old-k8s-version-744018/id_rsa Username:docker}
	I0317 11:25:11.562452  211660 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0317 11:25:11.593139  211660 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0317 11:25:11.593290  211660 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.20.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           192.168.76.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.20.0/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0317 11:25:11.597045  211660 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.20.0/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0317 11:25:12.480337  211660 start.go:971] {"host.minikube.internal": 192.168.76.1} host record injected into CoreDNS's ConfigMap
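
The sed pipeline a couple of lines up rewrites the coredns ConfigMap in flight: it inserts a hosts block just before the forward directive and a log directive just before errors, which is what the "host record injected" line confirms. The resulting Corefile fragment, abridged to the directives the command touches (elided directives unchanged):

	.:53 {
	    log
	    errors
	    ...
	    hosts {
	       192.168.76.1 host.minikube.internal
	       fallthrough
	    }
	    forward . /etc/resolv.conf
	    ...
	}
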
	I0317 11:25:12.481399  211660 node_ready.go:35] waiting up to 6m0s for node "old-k8s-version-744018" to be "Ready" ...
	I0317 11:25:12.518371  211660 node_ready.go:49] node "old-k8s-version-744018" has status "Ready":"True"
	I0317 11:25:12.518439  211660 node_ready.go:38] duration metric: took 37.011487ms for node "old-k8s-version-744018" to be "Ready" ...
	I0317 11:25:12.518464  211660 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0317 11:25:12.532234  211660 pod_ready.go:79] waiting up to 6m0s for pod "coredns-74ff55c5b-7q5js" in "kube-system" namespace to be "Ready" ...
	I0317 11:25:12.535088  211660 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0317 11:25:12.537994  211660 addons.go:514] duration metric: took 1.252471315s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0317 11:25:12.985296  211660 kapi.go:214] "coredns" deployment in "kube-system" namespace and "old-k8s-version-744018" context rescaled to 1 replicas
	I0317 11:25:13.040047  211660 pod_ready.go:98] error getting pod "coredns-74ff55c5b-7q5js" in "kube-system" namespace (skipping!): pods "coredns-74ff55c5b-7q5js" not found
	I0317 11:25:13.040074  211660 pod_ready.go:82] duration metric: took 507.711116ms for pod "coredns-74ff55c5b-7q5js" in "kube-system" namespace to be "Ready" ...
	E0317 11:25:13.040087  211660 pod_ready.go:67] WaitExtra: waitPodCondition: error getting pod "coredns-74ff55c5b-7q5js" in "kube-system" namespace (skipping!): pods "coredns-74ff55c5b-7q5js" not found
	I0317 11:25:13.040116  211660 pod_ready.go:79] waiting up to 6m0s for pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace to be "Ready" ...
	I0317 11:25:15.056456  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:17.545880  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:19.545933  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:22.047241  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:24.545169  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:26.546238  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:29.045961  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:31.047405  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:33.047929  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:35.546299  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:25:36.399915  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:25:36.399936  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:25:36.399942  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:25:36.399948  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:25:36.399951  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:25:36.399954  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:25:36.399957  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:25:36.399960  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:25:36.399972  191740 retry.go:31] will retry after 50.230938958s: missing components: kube-dns
	I0317 11:25:38.047415  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	[... 19 near-identical pod_ready.go:103 lines elided: the same pod was polled roughly every 2–3 seconds from 11:25:40 to 11:26:22, status "Ready":"False" throughout ...]
	I0317 11:26:24.546276  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:26:26.634826  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:26:26.634846  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:26:26.634851  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:26:26.634858  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:26:26.634862  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:26:26.634913  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:26:26.634917  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:26:26.634920  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:26:26.634933  191740 retry.go:31] will retry after 59.277105959s: missing components: kube-dns
	I0317 11:26:27.047865  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	[... 24 near-identical pod_ready.go:103 lines elided: the same pod was polled roughly every 2–3 seconds from 11:26:29 to 11:27:22, status "Ready":"False" throughout ...]
	I0317 11:27:25.047369  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:27:25.916306  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:27:25.916325  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:27:25.916330  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:27:25.916338  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:27:25.916343  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:27:25.916347  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:27:25.916349  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:27:25.916352  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:27:25.916363  191740 retry.go:31] will retry after 54.278086035s: missing components: kube-dns
	I0317 11:27:27.547230  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	[... 19 near-identical pod_ready.go:103 lines elided: the same pod was polled roughly every 2–3 seconds from 11:27:30 to 11:28:12, status "Ready":"False" throughout ...]
	I0317 11:28:14.545866  211660 pod_ready.go:103] pod "coredns-74ff55c5b-rdmgh" in "kube-system" namespace has status "Ready":"False"
	I0317 11:28:20.197551  191740 system_pods.go:86] 7 kube-system pods found
	I0317 11:28:20.197589  191740 system_pods.go:89] "coredns-668d6bf9bc-zt5x9" [91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee] Pending / Ready:ContainersNotReady (containers with unready status: [coredns]) / ContainersReady:ContainersNotReady (containers with unready status: [coredns])
	I0317 11:28:20.197595  191740 system_pods.go:89] "etcd-pause-600904" [e487e91b-9f7b-4d94-bb70-c5c74c449aa9] Running
	I0317 11:28:20.197602  191740 system_pods.go:89] "kindnet-w9d57" [6ab3f7ec-9933-4bab-b726-a1d9b6eb0850] Pending / Ready:ContainersNotReady (containers with unready status: [kindnet-cni]) / ContainersReady:ContainersNotReady (containers with unready status: [kindnet-cni])
	I0317 11:28:20.197605  191740 system_pods.go:89] "kube-apiserver-pause-600904" [04b653e3-2b1b-4168-bf73-678a8d6a6d49] Running
	I0317 11:28:20.197608  191740 system_pods.go:89] "kube-controller-manager-pause-600904" [0dedd16d-cead-4656-8906-e5b3b3f2b913] Running
	I0317 11:28:20.197612  191740 system_pods.go:89] "kube-proxy-xpp8b" [c6ae1386-0124-4848-881c-98aab2d22bf0] Running
	I0317 11:28:20.197614  191740 system_pods.go:89] "kube-scheduler-pause-600904" [51c41b6c-657a-4796-b3e5-5497d777e885] Running
	I0317 11:28:20.200792  191740 out.go:201] 
	W0317 11:28:20.203737  191740 out.go:270] X Exiting due to GUEST_START: failed to start node: wait 6m0s for node: waiting for apps_running: expected k8s-apps: missing components: kube-dns
	W0317 11:28:20.203763  191740 out.go:270] * 
	W0317 11:28:20.204628  191740 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0317 11:28:20.208837  191740 out.go:201] 
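
	Note: the GUEST_START failure above lines up with the containerd and kubelet logs below. The kindnet CNI image cannot be pulled (Docker Hub 429 rate limit), so no CNI network is ever configured, every coredns sandbox fails with "failed to find network info", and kube-dns never becomes Ready. A minimal way to confirm the pod-level state, assuming the standard pause-600904 kubeconfig context that minikube creates for the profile:

	# List kube-system pods with node placement; coredns and kindnet should show Pending
	kubectl --context pause-600904 -n kube-system get pods -o wide

	# Inspect the stuck kindnet pod (name taken from the system_pods log above);
	# Events should show ImagePullBackOff with the 429 Too Many Requests message
	kubectl --context pause-600904 -n kube-system describe pod kindnet-w9d57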
	
	
	==> container status <==
	CONTAINER           IMAGE               CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	971796530cee9       e5aac5df76d9b       9 minutes ago       Running             kube-proxy                0                   6c3bdbd4f3dd8       kube-proxy-xpp8b
	b44e7c74dba78       6417e1437b6d9       10 minutes ago      Running             kube-apiserver            0                   3a1c651352281       kube-apiserver-pause-600904
	5e4eed930d5d9       3c9285acfd2ff       10 minutes ago      Running             kube-controller-manager   0                   f13556bcd73b4       kube-controller-manager-pause-600904
	67dba1705f273       7fc9d4aa817aa       10 minutes ago      Running             etcd                      0                   91c180707a793       etcd-pause-600904
	0d46faee4abc1       82dfa03f692fb       10 minutes ago      Running             kube-scheduler            0                   39af4a528ed5e       kube-scheduler-pause-600904
	
	
	==> containerd <==
	Mar 17 11:25:43 pause-600904 containerd[837]: time="2025-03-17T11:25:43.120646292Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"70059ab731920452925bae6844b441dd82a34ab19f38eda6e2e1f5a328fab65e\": failed to find network info for sandbox \"70059ab731920452925bae6844b441dd82a34ab19f38eda6e2e1f5a328fab65e\""
	Mar 17 11:25:56 pause-600904 containerd[837]: time="2025-03-17T11:25:56.086549180Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:25:56 pause-600904 containerd[837]: time="2025-03-17T11:25:56.126458238Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"f753f8e06b41b42fc0d95e6cd374a6ccce8743139d63233a0cec5d1da984a10b\": failed to find network info for sandbox \"f753f8e06b41b42fc0d95e6cd374a6ccce8743139d63233a0cec5d1da984a10b\""
	Mar 17 11:26:11 pause-600904 containerd[837]: time="2025-03-17T11:26:11.087200360Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:26:11 pause-600904 containerd[837]: time="2025-03-17T11:26:11.127509479Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"2b28db57c03ceed5e4c1f624362b72ed23951cb2badbb1039810fed45845e9f5\": failed to find network info for sandbox \"2b28db57c03ceed5e4c1f624362b72ed23951cb2badbb1039810fed45845e9f5\""
	Mar 17 11:26:25 pause-600904 containerd[837]: time="2025-03-17T11:26:25.087058674Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:26:25 pause-600904 containerd[837]: time="2025-03-17T11:26:25.127296120Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"20d6191e8ea6db5e49be8bf58583447942a0ab698c6f75e76347b95e8e9612bc\": failed to find network info for sandbox \"20d6191e8ea6db5e49be8bf58583447942a0ab698c6f75e76347b95e8e9612bc\""
	Mar 17 11:26:36 pause-600904 containerd[837]: time="2025-03-17T11:26:36.087530119Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:26:36 pause-600904 containerd[837]: time="2025-03-17T11:26:36.146009712Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"e30e966f808d3fb818cda36558e6e4269c55dff1a91792ae664ee85cf924b927\": failed to find network info for sandbox \"e30e966f808d3fb818cda36558e6e4269c55dff1a91792ae664ee85cf924b927\""
	Mar 17 11:26:50 pause-600904 containerd[837]: time="2025-03-17T11:26:50.087293612Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:26:50 pause-600904 containerd[837]: time="2025-03-17T11:26:50.153426999Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"d76e03b603b24b507bdf61373e5f6c42eaaf07794ce35ca66949de035cf4db2d\": failed to find network info for sandbox \"d76e03b603b24b507bdf61373e5f6c42eaaf07794ce35ca66949de035cf4db2d\""
	Mar 17 11:27:01 pause-600904 containerd[837]: time="2025-03-17T11:27:01.087732042Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:27:01 pause-600904 containerd[837]: time="2025-03-17T11:27:01.126187440Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"28f0e7af32b1e3856669d5135d46bf87279880c1c4a7027d2533013c266c4617\": failed to find network info for sandbox \"28f0e7af32b1e3856669d5135d46bf87279880c1c4a7027d2533013c266c4617\""
	Mar 17 11:27:14 pause-600904 containerd[837]: time="2025-03-17T11:27:14.086587663Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:27:14 pause-600904 containerd[837]: time="2025-03-17T11:27:14.124987842Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"e29cc897a829d8f1a602a71e90d9025b91b45b93908255b7d2412221c1d72913\": failed to find network info for sandbox \"e29cc897a829d8f1a602a71e90d9025b91b45b93908255b7d2412221c1d72913\""
	Mar 17 11:27:29 pause-600904 containerd[837]: time="2025-03-17T11:27:29.089180926Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:27:29 pause-600904 containerd[837]: time="2025-03-17T11:27:29.142238528Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\": failed to find network info for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\""
	Mar 17 11:27:43 pause-600904 containerd[837]: time="2025-03-17T11:27:43.086489139Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:27:43 pause-600904 containerd[837]: time="2025-03-17T11:27:43.118346767Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\": failed to find network info for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\""
	Mar 17 11:27:56 pause-600904 containerd[837]: time="2025-03-17T11:27:56.085972606Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:27:56 pause-600904 containerd[837]: time="2025-03-17T11:27:56.122017352Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\": failed to find network info for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\""
	Mar 17 11:28:08 pause-600904 containerd[837]: time="2025-03-17T11:28:08.086431064Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:28:08 pause-600904 containerd[837]: time="2025-03-17T11:28:08.124463540Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\": failed to find network info for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\""
	Mar 17 11:28:19 pause-600904 containerd[837]: time="2025-03-17T11:28:19.086049034Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,}"
	Mar 17 11:28:19 pause-600904 containerd[837]: time="2025-03-17T11:28:19.145674351Z" level=error msg="RunPodSandbox for &PodSandboxMetadata{Name:coredns-668d6bf9bc-zt5x9,Uid:91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee,Namespace:kube-system,Attempt:0,} failed, error" error="failed to setup network for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\": failed to find network info for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\""
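
	The repeated "failed to find network info for sandbox" errors above are what containerd's CRI layer reports when no CNI configuration is available; with kindnet never starting, nothing writes a config into /etc/cni/net.d on the node. A quick check from the host, as a sketch using standard minikube ssh usage against the pause-600904 node:

	# An empty (or missing) CNI config directory confirms the sandbox failures are CNI-related
	minikube ssh -p pause-600904 -- ls -la /etc/cni/net.d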
	
	
	==> describe nodes <==
	Name:               pause-600904
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=arm64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=arm64
	                    kubernetes.io/hostname=pause-600904
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=28b3ce799b018a38b7c40f89b465976263272e76
	                    minikube.k8s.io/name=pause-600904
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_03_17T11_18_23_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///run/containerd/containerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Mon, 17 Mar 2025 11:18:20 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  pause-600904
	  AcquireTime:     <unset>
	  RenewTime:       Mon, 17 Mar 2025 11:28:16 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Mon, 17 Mar 2025 11:25:11 +0000   Mon, 17 Mar 2025 11:18:15 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Mon, 17 Mar 2025 11:25:11 +0000   Mon, 17 Mar 2025 11:18:15 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Mon, 17 Mar 2025 11:25:11 +0000   Mon, 17 Mar 2025 11:18:15 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Mon, 17 Mar 2025 11:25:11 +0000   Mon, 17 Mar 2025 11:18:20 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  192.168.85.2
	  Hostname:    pause-600904
	Capacity:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  203034800Ki
	  hugepages-1Gi:      0
	  hugepages-2Mi:      0
	  hugepages-32Mi:     0
	  hugepages-64Ki:     0
	  memory:             8022296Ki
	  pods:               110
	System Info:
	  Machine ID:                 fd1b7b2e691a47af89a12a3e5e7b1567
	  System UUID:                5be13c9a-0a90-4be4-bb7d-ef3d94136f20
	  Boot ID:                    6191d711-482a-47cf-8e52-43bf2fb89a15
	  Kernel Version:             5.15.0-1077-aws
	  OS Image:                   Ubuntu 22.04.5 LTS
	  Operating System:           linux
	  Architecture:               arm64
	  Container Runtime Version:  containerd://1.7.25
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (7 in total)
	  Namespace                   Name                                    CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                    ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-668d6bf9bc-zt5x9                100m (5%)     0 (0%)      70Mi (0%)        170Mi (2%)     9m54s
	  kube-system                 etcd-pause-600904                       100m (5%)     0 (0%)      100Mi (1%)       0 (0%)         9m58s
	  kube-system                 kindnet-w9d57                           100m (5%)     100m (5%)   50Mi (0%)        50Mi (0%)      9m54s
	  kube-system                 kube-apiserver-pause-600904             250m (12%)    0 (0%)      0 (0%)           0 (0%)         9m58s
	  kube-system                 kube-controller-manager-pause-600904    200m (10%)    0 (0%)      0 (0%)           0 (0%)         9m58s
	  kube-system                 kube-proxy-xpp8b                        0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m54s
	  kube-system                 kube-scheduler-pause-600904             100m (5%)     0 (0%)      0 (0%)           0 (0%)         9m58s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                850m (42%)  100m (5%)
	  memory             220Mi (2%)  220Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-1Gi      0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	  hugepages-32Mi     0 (0%)      0 (0%)
	  hugepages-64Ki     0 (0%)      0 (0%)
	Events:
	  Type     Reason                   Age    From             Message
	  ----     ------                   ----   ----             -------
	  Normal   Starting                 9m51s  kube-proxy       
	  Normal   Starting                 9m59s  kubelet          Starting kubelet.
	  Warning  CgroupV1                 9m59s  kubelet          cgroup v1 support is in maintenance mode, please migrate to cgroup v2
	  Normal   NodeAllocatableEnforced  9m58s  kubelet          Updated Node Allocatable limit across pods
	  Normal   NodeHasSufficientMemory  9m58s  kubelet          Node pause-600904 status is now: NodeHasSufficientMemory
	  Normal   NodeHasNoDiskPressure    9m58s  kubelet          Node pause-600904 status is now: NodeHasNoDiskPressure
	  Normal   NodeHasSufficientPID     9m58s  kubelet          Node pause-600904 status is now: NodeHasSufficientPID
	  Normal   RegisteredNode           9m55s  node-controller  Node pause-600904 event: Registered Node pause-600904 in Controller
	
	
	==> dmesg <==
	[Mar17 10:17] ACPI: SRAT not present
	[  +0.000000] ACPI: SRAT not present
	[  +0.000000] SPI driver altr_a10sr has no spi_device_id for altr,a10sr
	[  +0.014407] device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log.
	[  +0.504211] systemd[1]: Configuration file /run/systemd/system/netplan-ovs-cleanup.service is marked world-inaccessible. This has no effect as configuration data is accessible via APIs without restrictions. Proceeding anyway.
	[  +0.033838] systemd[1]: /lib/systemd/system/snapd.service:23: Unknown key name 'RestartMode' in section 'Service', ignoring.
	[  +0.824746] ena 0000:00:05.0: LLQ is not supported Fallback to host mode policy.
	[  +6.163432] kauditd_printk_skb: 36 callbacks suppressed
	[Mar17 11:13] kmem.limit_in_bytes is deprecated and will be removed. Please report your usecase to linux-mm@kvack.org if you depend on this functionality.
	
	
	==> etcd [67dba1705f273b504959b10a0393d39cc3909de9592d6d1a0c18f4219561a700] <==
	{"level":"info","ts":"2025-03-17T11:18:14.833260Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed switched to configuration voters=(11459225503572592365)"}
	{"level":"info","ts":"2025-03-17T11:18:14.834035Z","caller":"membership/cluster.go:421","msg":"added member","cluster-id":"68eaea490fab4e05","local-member-id":"9f0758e1c58a86ed","added-peer-id":"9f0758e1c58a86ed","added-peer-peer-urls":["https://192.168.85.2:2380"]}
	{"level":"info","ts":"2025-03-17T11:18:14.873212Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed is starting a new election at term 1"}
	{"level":"info","ts":"2025-03-17T11:18:14.873412Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became pre-candidate at term 1"}
	{"level":"info","ts":"2025-03-17T11:18:14.873535Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed received MsgPreVoteResp from 9f0758e1c58a86ed at term 1"}
	{"level":"info","ts":"2025-03-17T11:18:14.873658Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became candidate at term 2"}
	{"level":"info","ts":"2025-03-17T11:18:14.873760Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed received MsgVoteResp from 9f0758e1c58a86ed at term 2"}
	{"level":"info","ts":"2025-03-17T11:18:14.873842Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"9f0758e1c58a86ed became leader at term 2"}
	{"level":"info","ts":"2025-03-17T11:18:14.873935Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: 9f0758e1c58a86ed elected leader 9f0758e1c58a86ed at term 2"}
	{"level":"info","ts":"2025-03-17T11:18:14.879055Z","caller":"etcdserver/server.go:2651","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T11:18:14.887049Z","caller":"etcdserver/server.go:2140","msg":"published local member to cluster through raft","local-member-id":"9f0758e1c58a86ed","local-member-attributes":"{Name:pause-600904 ClientURLs:[https://192.168.85.2:2379]}","request-path":"/0/members/9f0758e1c58a86ed/attributes","cluster-id":"68eaea490fab4e05","publish-timeout":"7s"}
	{"level":"info","ts":"2025-03-17T11:18:14.887421Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-03-17T11:18:14.887439Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"68eaea490fab4e05","local-member-id":"9f0758e1c58a86ed","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T11:18:14.888094Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T11:18:14.888223Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-03-17T11:18:14.888441Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2025-03-17T11:18:14.888547Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-03-17T11:18:14.889764Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-03-17T11:18:14.887460Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-03-17T11:18:14.895475Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"192.168.85.2:2379"}
	{"level":"info","ts":"2025-03-17T11:18:14.905059Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-03-17T11:18:14.913102Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-03-17T11:28:15.815749Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":540}
	{"level":"info","ts":"2025-03-17T11:28:15.822495Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":540,"took":"6.228844ms","hash":4160174953,"current-db-size-bytes":1331200,"current-db-size":"1.3 MB","current-db-size-in-use-bytes":1331200,"current-db-size-in-use":"1.3 MB"}
	{"level":"info","ts":"2025-03-17T11:28:15.822559Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4160174953,"revision":540,"compact-revision":-1}
	
	
	==> kernel <==
	 11:28:21 up  1:10,  0 users,  load average: 0.17, 1.05, 1.57
	Linux pause-600904 5.15.0-1077-aws #84~20.04.1-Ubuntu SMP Mon Jan 20 22:14:27 UTC 2025 aarch64 aarch64 aarch64 GNU/Linux
	PRETTY_NAME="Ubuntu 22.04.5 LTS"
	
	
	==> kube-apiserver [b44e7c74dba782d4ef4b4487d2748ac7d1cdce136f0de307b724a50af202918b] <==
	I0317 11:18:20.099930       1 aggregator.go:171] initial CRD sync complete...
	I0317 11:18:20.100024       1 autoregister_controller.go:144] Starting autoregister controller
	I0317 11:18:20.100110       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0317 11:18:20.100197       1 cache.go:39] Caches are synced for autoregister controller
	I0317 11:18:20.129537       1 apf_controller.go:382] Running API Priority and Fairness config worker
	I0317 11:18:20.130014       1 apf_controller.go:385] Running API Priority and Fairness periodic rebalancing process
	I0317 11:18:20.130191       1 shared_informer.go:320] Caches are synced for node_authorizer
	I0317 11:18:20.130336       1 shared_informer.go:320] Caches are synced for configmaps
	I0317 11:18:20.178706       1 controller.go:615] quota admission added evaluator for: namespaces
	I0317 11:18:20.273561       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0317 11:18:20.549954       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0317 11:18:20.564755       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0317 11:18:20.566121       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0317 11:18:21.485276       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0317 11:18:21.537531       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0317 11:18:21.674094       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0317 11:18:21.681861       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [192.168.85.2]
	I0317 11:18:21.683942       1 controller.go:615] quota admission added evaluator for: endpoints
	I0317 11:18:21.689239       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0317 11:18:22.190402       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0317 11:18:22.763751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0317 11:18:22.803207       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0317 11:18:22.821194       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0317 11:18:27.668758       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0317 11:18:27.784460       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	
	
	==> kube-controller-manager [5e4eed930d5d993405d52af92fe6a1b0d9f769b057b87c69483fb8fd567aee4e] <==
	I0317 11:18:26.938084       1 shared_informer.go:320] Caches are synced for ephemeral
	I0317 11:18:26.938435       1 shared_informer.go:320] Caches are synced for resource quota
	I0317 11:18:26.938583       1 shared_informer.go:320] Caches are synced for endpoint_slice
	I0317 11:18:26.938679       1 shared_informer.go:320] Caches are synced for ReplicationController
	I0317 11:18:26.947605       1 shared_informer.go:320] Caches are synced for deployment
	I0317 11:18:26.948777       1 range_allocator.go:428] "Set node PodCIDR" logger="node-ipam-controller" node="pause-600904" podCIDRs=["10.244.0.0/24"]
	I0317 11:18:26.949062       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="pause-600904"
	I0317 11:18:26.949155       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="pause-600904"
	I0317 11:18:26.948086       1 shared_informer.go:320] Caches are synced for ReplicaSet
	I0317 11:18:26.948153       1 shared_informer.go:320] Caches are synced for PVC protection
	I0317 11:18:26.948454       1 shared_informer.go:320] Caches are synced for disruption
	I0317 11:18:26.948804       1 shared_informer.go:320] Caches are synced for legacy-service-account-token-cleaner
	I0317 11:18:27.405216       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="pause-600904"
	I0317 11:18:28.011117       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="196.886582ms"
	I0317 11:18:28.075381       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="64.198943ms"
	I0317 11:18:28.075545       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="92.637µs"
	I0317 11:18:28.115806       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="76.423µs"
	I0317 11:18:28.578184       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="47.843965ms"
	I0317 11:18:28.599243       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="21.016459ms"
	I0317 11:18:28.599344       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="55.476µs"
	I0317 11:18:30.279544       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="51.57µs"
	I0317 11:18:30.293254       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="50.659µs"
	I0317 11:18:30.306606       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="56.033µs"
	I0317 11:18:33.834396       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="pause-600904"
	I0317 11:25:11.362224       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="pause-600904"
	
	
	==> kube-proxy [971796530cee945730af755cfa6cb87590e7a73574e16176ded3de9561997343] <==
	I0317 11:18:29.328944       1 server_linux.go:66] "Using iptables proxy"
	I0317 11:18:29.436326       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["192.168.85.2"]
	E0317 11:18:29.436571       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0317 11:18:29.457419       1 server.go:243] "kube-proxy running in dual-stack mode" primary ipFamily="IPv4"
	I0317 11:18:29.457672       1 server_linux.go:170] "Using iptables Proxier"
	I0317 11:18:29.460171       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0317 11:18:29.460691       1 server.go:497] "Version info" version="v1.32.2"
	I0317 11:18:29.460926       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0317 11:18:29.462972       1 config.go:199] "Starting service config controller"
	I0317 11:18:29.463109       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0317 11:18:29.463197       1 config.go:105] "Starting endpoint slice config controller"
	I0317 11:18:29.463269       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0317 11:18:29.465939       1 config.go:329] "Starting node config controller"
	I0317 11:18:29.466109       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0317 11:18:29.564104       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0317 11:18:29.564144       1 shared_informer.go:320] Caches are synced for service config
	I0317 11:18:29.566448       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [0d46faee4abc1e37dc40ccfaa65f89186854897dcee9d70bbd210b568ff3c300] <==
	W0317 11:18:20.779581       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0317 11:18:20.779613       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.779654       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes is forbidden: User "system:kube-scheduler" cannot list resource "nodes" in API group "" at the cluster scope
	E0317 11:18:20.779677       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes is forbidden: User \"system:kube-scheduler\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.779720       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csinodes" in API group "storage.k8s.io" at the cluster scope
	E0317 11:18:20.779741       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSINode: failed to list *v1.CSINode: csinodes.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csinodes\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.779784       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Pod: pods is forbidden: User "system:kube-scheduler" cannot list resource "pods" in API group "" at the cluster scope
	E0317 11:18:20.779801       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Pod: failed to list *v1.Pod: pods is forbidden: User \"system:kube-scheduler\" cannot list resource \"pods\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.779897       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0317 11:18:20.779917       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.779954       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User "system:kube-scheduler" cannot list resource "statefulsets" in API group "apps" at the cluster scope
	E0317 11:18:20.779973       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.780013       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0317 11:18:20.780031       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.780063       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0317 11:18:20.780078       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.780116       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0317 11:18:20.780132       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.780168       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope
	E0317 11:18:20.780186       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.780291       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User "system:kube-scheduler" cannot list resource "poddisruptionbudgets" in API group "policy" at the cluster scope
	E0317 11:18:20.780314       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PodDisruptionBudget: failed to list *v1.PodDisruptionBudget: poddisruptionbudgets.policy is forbidden: User \"system:kube-scheduler\" cannot list resource \"poddisruptionbudgets\" in API group \"policy\" at the cluster scope" logger="UnhandledError"
	W0317 11:18:20.781036       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Namespace: namespaces is forbidden: User "system:kube-scheduler" cannot list resource "namespaces" in API group "" at the cluster scope
	E0317 11:18:20.781071       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Namespace: failed to list *v1.Namespace: namespaces is forbidden: User \"system:kube-scheduler\" cannot list resource \"namespaces\" in API group \"\" at the cluster scope" logger="UnhandledError"
	I0317 11:18:21.971663       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Mar 17 11:27:18 pause-600904 kubelet[1566]: E0317 11:27:18.087015    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kindnet-cni\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": failed to copy: httpReadSeeker: failed open: unexpected status code https://registry-1.docker.io/v2/kindest/kindnetd/manifests/sha256:86c933f3845d6a993c8f64632752b10aae67a4756c59096b3259426e839be955: 429 Too Many Requests - Server message: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="kube-system/kindnet-w9d57" podUID="6ab3f7ec-9933-4bab-b726-a1d9b6eb0850"
	Mar 17 11:27:29 pause-600904 kubelet[1566]: E0317 11:27:29.142621    1566 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\": failed to find network info for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\""
	Mar 17 11:27:29 pause-600904 kubelet[1566]: E0317 11:27:29.142684    1566 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\": failed to find network info for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:29 pause-600904 kubelet[1566]: E0317 11:27:29.142707    1566 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\": failed to find network info for sandbox \"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:29 pause-600904 kubelet[1566]: E0317 11:27:29.142750    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\\\": failed to find network info for sandbox \\\"3a9a9ff5c9e63ac570745f413c2a8ebae4bcee4456cc5dd2deff4bf52e21084e\\\"\"" pod="kube-system/coredns-668d6bf9bc-zt5x9" podUID="91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee"
	Mar 17 11:27:32 pause-600904 kubelet[1566]: E0317 11:27:32.086737    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kindnet-cni\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": failed to copy: httpReadSeeker: failed open: unexpected status code https://registry-1.docker.io/v2/kindest/kindnetd/manifests/sha256:86c933f3845d6a993c8f64632752b10aae67a4756c59096b3259426e839be955: 429 Too Many Requests - Server message: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="kube-system/kindnet-w9d57" podUID="6ab3f7ec-9933-4bab-b726-a1d9b6eb0850"
	Mar 17 11:27:43 pause-600904 kubelet[1566]: E0317 11:27:43.118768    1566 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\": failed to find network info for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\""
	Mar 17 11:27:43 pause-600904 kubelet[1566]: E0317 11:27:43.118845    1566 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\": failed to find network info for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:43 pause-600904 kubelet[1566]: E0317 11:27:43.118900    1566 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\": failed to find network info for sandbox \"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:43 pause-600904 kubelet[1566]: E0317 11:27:43.118987    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\\\": failed to find network info for sandbox \\\"3755aa42bfba4c97dd8c8675492cb90a0f3ed24994837acc55101edba5589882\\\"\"" pod="kube-system/coredns-668d6bf9bc-zt5x9" podUID="91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee"
	Mar 17 11:27:44 pause-600904 kubelet[1566]: E0317 11:27:44.086648    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kindnet-cni\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": failed to copy: httpReadSeeker: failed open: unexpected status code https://registry-1.docker.io/v2/kindest/kindnetd/manifests/sha256:86c933f3845d6a993c8f64632752b10aae67a4756c59096b3259426e839be955: 429 Too Many Requests - Server message: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="kube-system/kindnet-w9d57" podUID="6ab3f7ec-9933-4bab-b726-a1d9b6eb0850"
	Mar 17 11:27:56 pause-600904 kubelet[1566]: E0317 11:27:56.122346    1566 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\": failed to find network info for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\""
	Mar 17 11:27:56 pause-600904 kubelet[1566]: E0317 11:27:56.122419    1566 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\": failed to find network info for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:56 pause-600904 kubelet[1566]: E0317 11:27:56.122443    1566 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\": failed to find network info for sandbox \"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:27:56 pause-600904 kubelet[1566]: E0317 11:27:56.122487    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\\\": failed to find network info for sandbox \\\"0c19e88f84c1a18797f934df1db1625e64448cf4d01d41e47233631853dedae6\\\"\"" pod="kube-system/coredns-668d6bf9bc-zt5x9" podUID="91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee"
	Mar 17 11:27:59 pause-600904 kubelet[1566]: E0317 11:27:59.086708    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kindnet-cni\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": failed to copy: httpReadSeeker: failed open: unexpected status code https://registry-1.docker.io/v2/kindest/kindnetd/manifests/sha256:86c933f3845d6a993c8f64632752b10aae67a4756c59096b3259426e839be955: 429 Too Many Requests - Server message: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="kube-system/kindnet-w9d57" podUID="6ab3f7ec-9933-4bab-b726-a1d9b6eb0850"
	Mar 17 11:28:08 pause-600904 kubelet[1566]: E0317 11:28:08.124810    1566 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\": failed to find network info for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\""
	Mar 17 11:28:08 pause-600904 kubelet[1566]: E0317 11:28:08.124880    1566 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\": failed to find network info for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:28:08 pause-600904 kubelet[1566]: E0317 11:28:08.124902    1566 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\": failed to find network info for sandbox \"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:28:08 pause-600904 kubelet[1566]: E0317 11:28:08.124953    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\\\": failed to find network info for sandbox \\\"37bc9e389906800e94d0f3a8e4c2d43fa06a71751079da883bd30c36897bae4d\\\"\"" pod="kube-system/coredns-668d6bf9bc-zt5x9" podUID="91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee"
	Mar 17 11:28:12 pause-600904 kubelet[1566]: E0317 11:28:12.086762    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kindnet-cni\" with ImagePullBackOff: \"Back-off pulling image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": ErrImagePull: failed to pull and unpack image \\\"docker.io/kindest/kindnetd:v20250214-acbabc1a\\\": failed to copy: httpReadSeeker: failed open: unexpected status code https://registry-1.docker.io/v2/kindest/kindnetd/manifests/sha256:86c933f3845d6a993c8f64632752b10aae67a4756c59096b3259426e839be955: 429 Too Many Requests - Server message: toomanyrequests: You have reached your unauthenticated pull rate limit. https://www.docker.com/increase-rate-limit\"" pod="kube-system/kindnet-w9d57" podUID="6ab3f7ec-9933-4bab-b726-a1d9b6eb0850"
	Mar 17 11:28:19 pause-600904 kubelet[1566]: E0317 11:28:19.146121    1566 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\": failed to find network info for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\""
	Mar 17 11:28:19 pause-600904 kubelet[1566]: E0317 11:28:19.146199    1566 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\": failed to find network info for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:28:19 pause-600904 kubelet[1566]: E0317 11:28:19.146223    1566 kuberuntime_manager.go:1237] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to setup network for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\": failed to find network info for sandbox \"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\"" pod="kube-system/coredns-668d6bf9bc-zt5x9"
	Mar 17 11:28:19 pause-600904 kubelet[1566]: E0317 11:28:19.146286    1566 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"coredns-668d6bf9bc-zt5x9_kube-system(91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee)\\\": rpc error: code = Unknown desc = failed to setup network for sandbox \\\"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\\\": failed to find network info for sandbox \\\"a6576f51cd6bc792be1d13318e5ea08aa58fd8cb7f522179fe51d8f56d2d4d09\\\"\"" pod="kube-system/coredns-668d6bf9bc-zt5x9" podUID="91b7ca30-9f8f-44a9-99d2-2c04b8ec8eee"
	

-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-linux-arm64 status --format={{.APIServer}} -p pause-600904 -n pause-600904
helpers_test.go:261: (dbg) Run:  kubectl --context pause-600904 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: coredns-668d6bf9bc-zt5x9 kindnet-w9d57
helpers_test.go:274: ======> post-mortem[TestPause/serial/Start]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context pause-600904 describe pod coredns-668d6bf9bc-zt5x9 kindnet-w9d57
helpers_test.go:277: (dbg) Non-zero exit: kubectl --context pause-600904 describe pod coredns-668d6bf9bc-zt5x9 kindnet-w9d57: exit status 1 (89.185728ms)

** stderr ** 
	Error from server (NotFound): pods "coredns-668d6bf9bc-zt5x9" not found
	Error from server (NotFound): pods "kindnet-w9d57" not found

** /stderr **
helpers_test.go:279: kubectl --context pause-600904 describe pod coredns-668d6bf9bc-zt5x9 kindnet-w9d57: exit status 1
--- FAIL: TestPause/serial/Start (632.82s)

x
+
TestNetworkPlugins/group/calico/Start (7200.061s)

=== RUN   TestNetworkPlugins/group/calico/Start
net_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p calico-668750 --memory=3072 --alsologtostderr --wait=true --wait-timeout=15m --cni=calico --driver=docker  --container-runtime=containerd
E0317 12:25:05.029645    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.036151    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.047690    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.069257    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.110744    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.192284    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.354274    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:05.675926    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:06.317946    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:07.599597    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:10.161434    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:14.360328    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/old-k8s-version-744018/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:15.283678    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/default-k8s-diff-port-150529/client.crt: no such file or directory" logger="UnhandledError"
E0317 12:25:19.263026    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
panic: test timed out after 2h0m0s
	running tests:
		TestNetworkPlugins (1h6m8s)
		TestNetworkPlugins/group/calico (1m2s)
		TestNetworkPlugins/group/calico/Start (1m2s)
		TestNetworkPlugins/group/kindnet (7m14s)
		TestNetworkPlugins/group/kindnet/Start (7m14s)
		TestStartStop (1h7m34s)
		TestStartStop/group (1m2s)

goroutine 4464 [running]:
testing.(*M).startAlarm.func1()
	/usr/local/go/src/testing/testing.go:2484 +0x308
created by time.goFunc
	/usr/local/go/src/time/sleep.go:215 +0x38

goroutine 1 [chan receive, 61 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1753 +0x43c
testing.tRunner(0x400013ae00, 0x40008ebbb8)
	/usr/local/go/src/testing/testing.go:1798 +0x120
testing.runTests(0x40006fa090, {0x4e24dc0, 0x2c, 0x2c}, {0x40008ebd08?, 0x122144?, 0x4e4c6a0?})
	/usr/local/go/src/testing/testing.go:2277 +0x3ec
testing.(*M).Run(0x40005b8780)
	/usr/local/go/src/testing/testing.go:2142 +0x588
k8s.io/minikube/test/integration.TestMain(0x40005b8780)
	/home/jenkins/workspace/Build_Cross/test/integration/main_test.go:62 +0x84
main.main()
	_testmain.go:133 +0x98

goroutine 2496 [chan receive, 67 minutes]:
testing.(*testState).waitParallel(0x40008dc460)
	/usr/local/go/src/testing/testing.go:1926 +0x160
testing.(*T).Parallel(0x4001502e00)
	/usr/local/go/src/testing/testing.go:1578 +0x1bc
k8s.io/minikube/test/integration.MaybeParallel(0x4001502e00)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4001502e00)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x4001502e00, 0x4000625180)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 3637 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4001d82490, 0xf)
	/usr/local/go/src/runtime/sema.go:597 +0x150
sync.(*Cond).Wait(0x4001d82480)
	/usr/local/go/src/sync/cond.go:71 +0xc4
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3438640)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x88
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001d824c0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x3c
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x40004bb808?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4001dbbaa0, {0x33e52a0, 0x4001b3d6b0}, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4001dbbaa0, 0x3b9aca00, 0x0, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3634
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x198

goroutine 2335 [chan receive, 7 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1753 +0x43c
testing.tRunner(0x40014121c0, 0x40014f0450)
	/usr/local/go/src/testing/testing.go:1798 +0x120
created by testing.(*T).Run in goroutine 2082
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 2623 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2622
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xc0

goroutine 108 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3435700)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x284
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 113
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x200

goroutine 3634 [chan receive, 20 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001d824c0, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 3629
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x490

goroutine 115 [select, 1 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3424870, 0x40001020e0}, 0x4001404740, 0x4001595f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x3424870, 0x40001020e0}, 0x58?, 0x4001404740, 0x4001404788)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3424870?, 0x40001020e0?}, 0x0?, 0x0?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x90984?, 0x4000956900?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 109
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x23c

goroutine 1142 [chan send, 109 minutes]:
os/exec.(*Cmd).watchCtx(0x40008f8f00, 0x4001e48460)
	/usr/local/go/src/os/exec/exec.go:814 +0x2c4
created by os/exec.(*Cmd).Start in goroutine 741
	/usr/local/go/src/os/exec/exec.go:775 +0x738

goroutine 114 [sync.Cond.Wait, 1 minutes]:
sync.runtime_notifyListWait(0x40006e5150, 0x2d)
	/usr/local/go/src/runtime/sema.go:597 +0x150
sync.(*Cond).Wait(0x40006e5140)
	/usr/local/go/src/sync/cond.go:71 +0xc4
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3438640)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x88
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x40006e5180)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x3c
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x40004bb808?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4000087230, {0x33e52a0, 0x4001344210}, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4000087230, 0x3b9aca00, 0x0, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 109
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x198

goroutine 116 [select, 1 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 115
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xc0

goroutine 109 [chan receive, 117 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x40006e5180, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 113
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x490

goroutine 4467 [syscall]:
syscall.Syscall6(0x5f, 0x3, 0x11, 0x4001591c38, 0x4, 0x4000125680, 0x0)
	/usr/local/go/src/syscall/syscall_linux.go:95 +0x2c
internal/syscall/unix.Waitid(0x4001591d98?, 0x191990?, 0xffffea9ea1b5?, 0x0?, 0x40005ccb40?)
	/usr/local/go/src/internal/syscall/unix/waitid_linux.go:18 +0x44
os.(*Process).pidfdWait.func1(...)
	/usr/local/go/src/os/pidfd_linux.go:106
os.ignoringEINTR(...)
	/usr/local/go/src/os/file_posix.go:251
os.(*Process).pidfdWait(0x4001496200)
	/usr/local/go/src/os/pidfd_linux.go:105 +0x1cc
os.(*Process).wait(0x3?)
	/usr/local/go/src/os/exec_unix.go:27 +0x2c
os.(*Process).Wait(...)
	/usr/local/go/src/os/exec.go:358
os/exec.(*Cmd).Wait(0x40014d8480)
	/usr/local/go/src/os/exec/exec.go:922 +0x38
os/exec.(*Cmd).Run(0x40014d8480)
	/usr/local/go/src/os/exec/exec.go:626 +0x38
k8s.io/minikube/test/integration.Run(0x400149c1c0, 0x40014d8480)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:103 +0x180
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.1(0x400149c1c0)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:112 +0x50
testing.tRunner(0x400149c1c0, 0x4001426390)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2497
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 4469 [IO wait]:
internal/poll.runtime_pollWait(0xffff64ec9aa8, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001510600?, 0x40017dc460?, 0x1)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001510600, {0x40017dc460, 0x9ba0, 0x9ba0})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0x40005b6270, {0x40017dc460?, 0x4001779d68?, 0x865c8?})
	/usr/local/go/src/os/file.go:124 +0x6c
bytes.(*Buffer).ReadFrom(0x40014264b0, {0x33e3758, 0x4000778240})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
io.copyBuffer({0x33e38e0, 0x40014264b0}, {0x33e3758, 0x4000778240}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x14c
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x40005b6270?, {0x33e38e0, 0x40014264b0})
	/usr/local/go/src/os/file.go:275 +0x58
os.(*File).WriteTo(0x40005b6270, {0x33e38e0, 0x40014264b0})
	/usr/local/go/src/os/file.go:253 +0xa0
io.copyBuffer({0x33e38e0, 0x40014264b0}, {0x33e37d8, 0x40005b6270}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x98
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x44
os/exec.(*Cmd).Start.func2(0x4001ae0a80?)
	/usr/local/go/src/os/exec/exec.go:749 +0x34
created by os/exec.(*Cmd).Start in goroutine 4467
	/usr/local/go/src/os/exec/exec.go:748 +0x76c

goroutine 849 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3435700)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x284
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 816
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x200

goroutine 850 [chan receive, 111 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001f2de80, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 816
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x490

goroutine 632 [IO wait, 113 minutes]:
internal/poll.runtime_pollWait(0xffff64ec9cd8, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4000286280?, 0x3800000038?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Accept(0x4000286280)
	/usr/local/go/src/internal/poll/fd_unix.go:620 +0x24c
net.(*netFD).accept(0x4000286280)
	/usr/local/go/src/net/fd_unix.go:172 +0x28
net.(*TCPListener).accept(0x4001f2c6c0)
	/usr/local/go/src/net/tcpsock_posix.go:159 +0x24
net.(*TCPListener).Accept(0x4001f2c6c0)
	/usr/local/go/src/net/tcpsock.go:380 +0x2c
net/http.(*Server).Serve(0x400139a200, {0x3413150, 0x4001f2c6c0})
	/usr/local/go/src/net/http/server.go:3424 +0x290
net/http.(*Server).ListenAndServe(0x400139a200)
	/usr/local/go/src/net/http/server.go:3350 +0x84
k8s.io/minikube/test/integration.startHTTPProxy.func1(...)
	/home/jenkins/workspace/Build_Cross/test/integration/functional_test.go:2230
created by k8s.io/minikube/test/integration.startHTTPProxy in goroutine 630
	/home/jenkins/workspace/Build_Cross/test/integration/functional_test.go:2229 +0x11c

goroutine 2082 [chan receive, 67 minutes]:
testing.(*T).Run(0x40018981c0, {0x26f01bf?, 0x91980ea0015fb94?}, 0x40014f0450)
	/usr/local/go/src/testing/testing.go:1859 +0x388
k8s.io/minikube/test/integration.TestNetworkPlugins(0x40018981c0)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:52 +0xcc
testing.tRunner(0x40018981c0, 0x3088a68)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 1146 [chan send, 109 minutes]:
os/exec.(*Cmd).watchCtx(0x40008f9800, 0x4001e48930)
	/usr/local/go/src/os/exec/exec.go:814 +0x2c4
created by os/exec.(*Cmd).Start in goroutine 1145
	/usr/local/go/src/os/exec/exec.go:775 +0x738

goroutine 3638 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3424870, 0x40001020e0}, 0x4001772740, 0x4001772788)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x3424870, 0x40001020e0}, 0x68?, 0x4001772740, 0x4001772788)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3424870?, 0x40001020e0?}, 0x0?, 0x90984?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x40013b44e0?, 0x90984?, 0x400071f080?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 3634
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x23c

goroutine 838 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4001f2de50, 0x2c)
	/usr/local/go/src/runtime/sema.go:597 +0x150
sync.(*Cond).Wait(0x4001f2de40)
	/usr/local/go/src/sync/cond.go:71 +0xc4
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3438640)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x88
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001f2de80)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x3c
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x40008fe808?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40009bfdc0, {0x33e52a0, 0x40015d0450}, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40009bfdc0, 0x3b9aca00, 0x0, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 850
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x198

goroutine 4261 [syscall, 7 minutes]:
syscall.Syscall6(0x5f, 0x3, 0x10, 0x4001365c38, 0x4, 0x4000084990, 0x0)
	/usr/local/go/src/syscall/syscall_linux.go:95 +0x2c
internal/syscall/unix.Waitid(0x4001365d98?, 0x191990?, 0xffffea9ea1b5?, 0x0?, 0x40005ccc00?)
	/usr/local/go/src/internal/syscall/unix/waitid_linux.go:18 +0x44
os.(*Process).pidfdWait.func1(...)
	/usr/local/go/src/os/pidfd_linux.go:106
os.ignoringEINTR(...)
	/usr/local/go/src/os/file_posix.go:251
os.(*Process).pidfdWait(0x4001dc0240)
	/usr/local/go/src/os/pidfd_linux.go:105 +0x1cc
os.(*Process).wait(0x3?)
	/usr/local/go/src/os/exec_unix.go:27 +0x2c
os.(*Process).Wait(...)
	/usr/local/go/src/os/exec.go:358
os/exec.(*Cmd).Wait(0x40008f8600)
	/usr/local/go/src/os/exec/exec.go:922 +0x38
os/exec.(*Cmd).Run(0x40008f8600)
	/usr/local/go/src/os/exec/exec.go:626 +0x38
k8s.io/minikube/test/integration.Run(0x4001bae000, 0x40008f8600)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:103 +0x180
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1.1(0x4001bae000)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:112 +0x50
testing.tRunner(0x4001bae000, 0x40009e6b40)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2431
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 2622 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3424870, 0x40001020e0}, 0x40013e6f40, 0x40013e6f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x3424870, 0x40001020e0}, 0x20?, 0x40013e6f40, 0x40013e6f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3424870?, 0x40001020e0?}, 0x0?, 0x0?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x90984?, 0x4000956c00?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2640
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x23c

goroutine 2146 [chan receive, 67 minutes]:
testing.(*T).Run(0x4001898e00, {0x26f01bf?, 0x40013adf58?}, 0x3088ca0)
	/usr/local/go/src/testing/testing.go:1859 +0x388
k8s.io/minikube/test/integration.TestStartStop(0x4001898e00)
	/home/jenkins/workspace/Build_Cross/test/integration/start_stop_delete_test.go:46 +0x3c
testing.tRunner(0x4001898e00, 0x3088ab0)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 2430 [chan receive, 67 minutes]:
testing.(*testState).waitParallel(0x40008dc460)
	/usr/local/go/src/testing/testing.go:1926 +0x160
testing.(*T).Parallel(0x400079e540)
	/usr/local/go/src/testing/testing.go:1578 +0x1bc
k8s.io/minikube/test/integration.MaybeParallel(0x400079e540)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400079e540)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x400079e540, 0x400066c800)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 839 [select]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3424870, 0x40001020e0}, 0x4001367f40, 0x4001367f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x3424870, 0x40001020e0}, 0xe0?, 0x4001367f40, 0x4001367f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3424870?, 0x40001020e0?}, 0x15fcb0?, 0x40009c5c00?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x90984?, 0x40004b1500?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 850
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x23c

goroutine 1085 [chan send, 109 minutes]:
os/exec.(*Cmd).watchCtx(0x4000957e00, 0x4000103f80)
	/usr/local/go/src/os/exec/exec.go:814 +0x2c4
created by os/exec.(*Cmd).Start in goroutine 1084
	/usr/local/go/src/os/exec/exec.go:775 +0x738

goroutine 840 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 839
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xc0

goroutine 2340 [chan receive]:
testing.(*testState).waitParallel(0x40008dc460)
	/usr/local/go/src/testing/testing.go:1926 +0x160
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1768 +0x544
testing.tRunner(0x40009c5180, 0x3088ca0)
	/usr/local/go/src/testing/testing.go:1798 +0x120
created by testing.(*T).Run in goroutine 2146
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 4262 [IO wait, 7 minutes]:
internal/poll.runtime_pollWait(0xffff64ec9bc0, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001378a20?, 0x40008b4bf0?, 0x1)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001378a20, {0x40008b4bf0, 0x410, 0x410})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0x40005b6288, {0x40008b4bf0?, 0x4001774568?, 0x865c8?})
	/usr/local/go/src/os/file.go:124 +0x6c
bytes.(*Buffer).ReadFrom(0x40009e6d50, {0x33e3758, 0x40007781d0})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
io.copyBuffer({0x33e38e0, 0x40009e6d50}, {0x33e3758, 0x40007781d0}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x14c
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x40005b6288?, {0x33e38e0, 0x40009e6d50})
	/usr/local/go/src/os/file.go:275 +0x58
os.(*File).WriteTo(0x40005b6288, {0x33e38e0, 0x40009e6d50})
	/usr/local/go/src/os/file.go:253 +0xa0
io.copyBuffer({0x33e38e0, 0x40009e6d50}, {0x33e37d8, 0x40005b6288}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x98
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x44
os/exec.(*Cmd).Start.func2(0x4001bae000?)
	/usr/local/go/src/os/exec/exec.go:749 +0x34
created by os/exec.(*Cmd).Start in goroutine 4261
	/usr/local/go/src/os/exec/exec.go:748 +0x76c

goroutine 2639 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3435700)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x284
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2635
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x200

goroutine 2428 [chan receive, 67 minutes]:
testing.(*testState).waitParallel(0x40008dc460)
	/usr/local/go/src/testing/testing.go:1926 +0x160
testing.(*T).Parallel(0x4001413a40)
	/usr/local/go/src/testing/testing.go:1578 +0x1bc
k8s.io/minikube/test/integration.MaybeParallel(0x4001413a40)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4001413a40)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x4001413a40, 0x400066c700)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 1151 [select, 109 minutes]:
net/http.(*persistConn).writeLoop(0x40015237a0)
	/usr/local/go/src/net/http/transport.go:2590 +0x9c
created by net/http.(*Transport).dialConn in goroutine 1197
	/usr/local/go/src/net/http/transport.go:1945 +0x120c

goroutine 1150 [select, 109 minutes]:
net/http.(*persistConn).readLoop(0x40015237a0)
	/usr/local/go/src/net/http/transport.go:2395 +0xb04
created by net/http.(*Transport).dialConn in goroutine 1197
	/usr/local/go/src/net/http/transport.go:1944 +0x11c4

goroutine 2431 [chan receive, 7 minutes]:
testing.(*T).Run(0x400079f500, {0x26f01c4?, 0x33dab58?}, 0x40009e6b40)
	/usr/local/go/src/testing/testing.go:1859 +0x388
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x400079f500)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:111 +0x558
testing.tRunner(0x400079f500, 0x400066c880)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 3633 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3435700)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x284
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 3629
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x200

goroutine 2915 [select, 1 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 2914
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xc0

goroutine 2497 [chan receive]:
testing.(*T).Run(0x4001503180, {0x26f01c4?, 0x33dab58?}, 0x4001426390)
	/usr/local/go/src/testing/testing.go:1859 +0x388
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4001503180)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:111 +0x558
testing.tRunner(0x4001503180, 0x4000625200)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 2429 [chan receive, 67 minutes]:
testing.(*testState).waitParallel(0x40008dc460)
	/usr/local/go/src/testing/testing.go:1926 +0x160
testing.(*T).Parallel(0x4001413c00)
	/usr/local/go/src/testing/testing.go:1578 +0x1bc
k8s.io/minikube/test/integration.MaybeParallel(0x4001413c00)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x40
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0x4001413c00)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x2ec
testing.tRunner(0x4001413c00, 0x400066c780)
	/usr/local/go/src/testing/testing.go:1792 +0xe4
created by testing.(*T).Run in goroutine 2335
	/usr/local/go/src/testing/testing.go:1851 +0x374

goroutine 4263 [IO wait]:
internal/poll.runtime_pollWait(0xffff64ec9df0, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001378ae0?, 0x4001d5beda?, 0x1)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001378ae0, {0x4001d5beda, 0x18126, 0x18126})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0x40005b62a0, {0x4001d5beda?, 0x4001779568?, 0x865c8?})
	/usr/local/go/src/os/file.go:124 +0x6c
bytes.(*Buffer).ReadFrom(0x40009e6e10, {0x33e3758, 0x40007781e8})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
io.copyBuffer({0x33e38e0, 0x40009e6e10}, {0x33e3758, 0x40007781e8}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x14c
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x40005b62a0?, {0x33e38e0, 0x40009e6e10})
	/usr/local/go/src/os/file.go:275 +0x58
os.(*File).WriteTo(0x40005b62a0, {0x33e38e0, 0x40009e6e10})
	/usr/local/go/src/os/file.go:253 +0xa0
io.copyBuffer({0x33e38e0, 0x40009e6e10}, {0x33e37d8, 0x40005b62a0}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x98
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x44
os/exec.(*Cmd).Start.func2(0x4001ae01c0?)
	/usr/local/go/src/os/exec/exec.go:749 +0x34
created by os/exec.(*Cmd).Start in goroutine 4261
	/usr/local/go/src/os/exec/exec.go:748 +0x76c

goroutine 4468 [IO wait]:
internal/poll.runtime_pollWait(0xffff64a70070, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0xa0
internal/poll.(*pollDesc).wait(0x4001510540?, 0x4001432bf0?, 0x1)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x28
internal/poll.(*pollDesc).waitRead(...)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:89
internal/poll.(*FD).Read(0x4001510540, {0x4001432bf0, 0x410, 0x410})
	/usr/local/go/src/internal/poll/fd_unix.go:165 +0x1fc
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0x40005b6258, {0x4001432bf0?, 0x40000a6568?, 0x865c8?})
	/usr/local/go/src/os/file.go:124 +0x6c
bytes.(*Buffer).ReadFrom(0x4001426480, {0x33e3758, 0x4000778238})
	/usr/local/go/src/bytes/buffer.go:211 +0x90
io.copyBuffer({0x33e38e0, 0x4001426480}, {0x33e3758, 0x4000778238}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x14c
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x40005b6258?, {0x33e38e0, 0x4001426480})
	/usr/local/go/src/os/file.go:275 +0x58
os.(*File).WriteTo(0x40005b6258, {0x33e38e0, 0x4001426480})
	/usr/local/go/src/os/file.go:253 +0xa0
io.copyBuffer({0x33e38e0, 0x4001426480}, {0x33e37d8, 0x40005b6258}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x98
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x44
os/exec.(*Cmd).Start.func2(0x400149c1c0?)
	/usr/local/go/src/os/exec/exec.go:749 +0x34
created by os/exec.(*Cmd).Start in goroutine 4467
	/usr/local/go/src/os/exec/exec.go:748 +0x76c

goroutine 4264 [select, 7 minutes]:
os/exec.(*Cmd).watchCtx(0x40008f8600, 0x40013aabd0)
	/usr/local/go/src/os/exec/exec.go:789 +0x78
created by os/exec.(*Cmd).Start in goroutine 4261
	/usr/local/go/src/os/exec/exec.go:775 +0x738

goroutine 4470 [select]:
os/exec.(*Cmd).watchCtx(0x40014d8480, 0x4000103960)
	/usr/local/go/src/os/exec/exec.go:789 +0x78
created by os/exec.(*Cmd).Start in goroutine 4467
	/usr/local/go/src/os/exec/exec.go:775 +0x738

goroutine 2621 [sync.Cond.Wait]:
sync.runtime_notifyListWait(0x4001b84810, 0x1b)
	/usr/local/go/src/runtime/sema.go:597 +0x150
sync.(*Cond).Wait(0x4001b84800)
	/usr/local/go/src/sync/cond.go:71 +0xc4
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3438640)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x88
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001b84840)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x3c
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x4e4e760?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x4001dba080, {0x33e52a0, 0x40013441e0}, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x4001dba080, 0x3b9aca00, 0x0, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2640
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x198

goroutine 2913 [sync.Cond.Wait, 1 minutes]:
sync.runtime_notifyListWait(0x4001f2c7d0, 0x1b)
	/usr/local/go/src/runtime/sema.go:597 +0x150
sync.(*Cond).Wait(0x4001f2c7c0)
	/usr/local/go/src/sync/cond.go:71 +0xc4
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x3438640)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x88
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0x4001f2c800)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x3c
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0x4e4e760?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x40
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x40000862d0, {0x33e52a0, 0x400074a4e0}, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0x90
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x40000862d0, 0x3b9aca00, 0x0, 0x1, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x80
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2903
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x198

goroutine 2640 [chan receive, 50 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001b84840, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2635
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x490

goroutine 2914 [select, 1 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3424870, 0x40001020e0}, 0x4001402f40, 0x400159ff88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xb0
k8s.io/apimachinery/pkg/util/wait.poll({0x3424870, 0x40001020e0}, 0x0?, 0x4001402f40, 0x4001402f88)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x90
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3424870?, 0x40001020e0?}, 0x0?, 0x0?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x44
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x0?, 0x90984?, 0x400175d500?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x40
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 2903
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x23c

goroutine 2902 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3435700)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x284
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 2898
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x200

goroutine 2903 [chan receive, 46 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0x4001f2c800, 0x40001020e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x248
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 2898
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x490

goroutine 3639 [select]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x150
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 3638
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xc0


Test pass (196/226)

Order passed test Duration
3 TestDownloadOnly/v1.20.0/json-events 6.85
4 TestDownloadOnly/v1.20.0/preload-exists 0
8 TestDownloadOnly/v1.20.0/LogsDuration 0.1
9 TestDownloadOnly/v1.20.0/DeleteAll 0.22
10 TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds 0.14
12 TestDownloadOnly/v1.32.2/json-events 6.1
13 TestDownloadOnly/v1.32.2/preload-exists 0
17 TestDownloadOnly/v1.32.2/LogsDuration 0.09
18 TestDownloadOnly/v1.32.2/DeleteAll 0.23
19 TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds 0.15
21 TestBinaryMirror 0.59
25 TestAddons/PreSetup/EnablingAddonOnNonExistingCluster 0.07
26 TestAddons/PreSetup/DisablingAddonOnNonExistingCluster 0.08
27 TestAddons/Setup 218.21
29 TestAddons/serial/Volcano 41.3
31 TestAddons/serial/GCPAuth/Namespaces 0.19
32 TestAddons/serial/GCPAuth/FakeCredentials 9.87
35 TestAddons/parallel/Registry 15.98
36 TestAddons/parallel/Ingress 21.27
37 TestAddons/parallel/InspektorGadget 11.81
38 TestAddons/parallel/MetricsServer 5.96
40 TestAddons/parallel/CSI 37.84
41 TestAddons/parallel/Headlamp 17.11
42 TestAddons/parallel/CloudSpanner 5.55
43 TestAddons/parallel/LocalPath 8.52
44 TestAddons/parallel/NvidiaDevicePlugin 6.49
45 TestAddons/parallel/Yakd 11.91
47 TestAddons/StoppedEnableDisable 12.31
48 TestCertOptions 31.48
49 TestCertExpiration 220.02
51 TestForceSystemdFlag 35.11
52 TestForceSystemdEnv 32.46
53 TestDockerEnvContainerd 47.02
58 TestErrorSpam/setup 28.63
59 TestErrorSpam/start 0.79
60 TestErrorSpam/status 1.11
61 TestErrorSpam/pause 1.85
62 TestErrorSpam/unpause 1.79
63 TestErrorSpam/stop 1.48
66 TestFunctional/serial/CopySyncFile 0
67 TestFunctional/serial/StartWithProxy 52.28
68 TestFunctional/serial/AuditLog 0
69 TestFunctional/serial/SoftStart 6.32
70 TestFunctional/serial/KubeContext 0.07
71 TestFunctional/serial/KubectlGetPods 0.13
74 TestFunctional/serial/CacheCmd/cache/add_remote 4.26
75 TestFunctional/serial/CacheCmd/cache/add_local 1.25
76 TestFunctional/serial/CacheCmd/cache/CacheDelete 0.05
77 TestFunctional/serial/CacheCmd/cache/list 0.05
78 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 0.33
79 TestFunctional/serial/CacheCmd/cache/cache_reload 2.03
80 TestFunctional/serial/CacheCmd/cache/delete 0.11
81 TestFunctional/serial/MinikubeKubectlCmd 0.14
82 TestFunctional/serial/MinikubeKubectlCmdDirectly 0.13
83 TestFunctional/serial/ExtraConfig 43.75
84 TestFunctional/serial/ComponentHealth 0.09
85 TestFunctional/serial/LogsCmd 1.74
86 TestFunctional/serial/LogsFileCmd 1.72
87 TestFunctional/serial/InvalidService 4.24
89 TestFunctional/parallel/ConfigCmd 0.46
90 TestFunctional/parallel/DashboardCmd 10.4
91 TestFunctional/parallel/DryRun 0.49
92 TestFunctional/parallel/InternationalLanguage 0.21
93 TestFunctional/parallel/StatusCmd 1.09
97 TestFunctional/parallel/ServiceCmdConnect 10.65
98 TestFunctional/parallel/AddonsCmd 0.15
99 TestFunctional/parallel/PersistentVolumeClaim 25.47
101 TestFunctional/parallel/SSHCmd 0.72
102 TestFunctional/parallel/CpCmd 2.16
104 TestFunctional/parallel/FileSync 0.32
105 TestFunctional/parallel/CertSync 2.03
109 TestFunctional/parallel/NodeLabels 0.08
111 TestFunctional/parallel/NonActiveRuntimeDisabled 0.78
113 TestFunctional/parallel/License 0.25
114 TestFunctional/parallel/Version/short 0.06
115 TestFunctional/parallel/Version/components 1.29
117 TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel 0.54
118 TestFunctional/parallel/ImageCommands/ImageListShort 0.27
119 TestFunctional/parallel/ImageCommands/ImageListTable 0.27
120 TestFunctional/parallel/ImageCommands/ImageListJson 0.28
121 TestFunctional/parallel/ImageCommands/ImageListYaml 0.23
122 TestFunctional/parallel/ImageCommands/ImageBuild 3.63
123 TestFunctional/parallel/ImageCommands/Setup 0.82
124 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0
126 TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup 9.48
127 TestFunctional/parallel/ImageCommands/ImageLoadDaemon 1.39
128 TestFunctional/parallel/ImageCommands/ImageReloadDaemon 1.27
129 TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon 1.36
130 TestFunctional/parallel/ImageCommands/ImageSaveToFile 0.34
131 TestFunctional/parallel/ImageCommands/ImageRemove 0.47
132 TestFunctional/parallel/ImageCommands/ImageLoadFromFile 0.7
133 TestFunctional/parallel/ImageCommands/ImageSaveDaemon 0.41
134 TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP 0.08
135 TestFunctional/parallel/TunnelCmd/serial/AccessDirect 0
139 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
140 TestFunctional/parallel/ServiceCmd/DeployApp 7.25
141 TestFunctional/parallel/ServiceCmd/List 0.52
142 TestFunctional/parallel/ServiceCmd/JSONOutput 0.5
143 TestFunctional/parallel/ServiceCmd/HTTPS 0.38
144 TestFunctional/parallel/ServiceCmd/Format 0.36
145 TestFunctional/parallel/ServiceCmd/URL 0.37
146 TestFunctional/parallel/UpdateContextCmd/no_changes 0.16
147 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 0.14
148 TestFunctional/parallel/UpdateContextCmd/no_clusters 0.15
149 TestFunctional/parallel/ProfileCmd/profile_not_create 0.48
150 TestFunctional/parallel/ProfileCmd/profile_list 0.43
151 TestFunctional/parallel/ProfileCmd/profile_json_output 0.41
152 TestFunctional/parallel/MountCmd/any-port 8.44
153 TestFunctional/parallel/MountCmd/specific-port 2.3
154 TestFunctional/parallel/MountCmd/VerifyCleanup 2.69
155 TestFunctional/delete_echo-server_images 0.04
156 TestFunctional/delete_my-image_image 0.02
157 TestFunctional/delete_minikube_cached_images 0.02
162 TestMultiControlPlane/serial/StartCluster 117.58
163 TestMultiControlPlane/serial/DeployApp 34.9
164 TestMultiControlPlane/serial/PingHostFromPods 1.74
165 TestMultiControlPlane/serial/AddWorkerNode 21.45
166 TestMultiControlPlane/serial/NodeLabels 0.11
167 TestMultiControlPlane/serial/HAppyAfterClusterStart 0.97
168 TestMultiControlPlane/serial/CopyFile 19.08
169 TestMultiControlPlane/serial/StopSecondaryNode 12.79
170 TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop 0.77
171 TestMultiControlPlane/serial/RestartSecondaryNode 17.81
172 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart 1.59
173 TestMultiControlPlane/serial/RestartClusterKeepsNodes 135.43
174 TestMultiControlPlane/serial/DeleteSecondaryNode 10.95
175 TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete 0.72
176 TestMultiControlPlane/serial/StopCluster 35.95
177 TestMultiControlPlane/serial/RestartCluster 73.5
178 TestMultiControlPlane/serial/DegradedAfterClusterRestart 0.7
179 TestMultiControlPlane/serial/AddSecondaryNode 44.62
180 TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd 1
184 TestJSONOutput/start/Command 68.85
185 TestJSONOutput/start/Audit 0
187 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
188 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
190 TestJSONOutput/pause/Command 0.74
191 TestJSONOutput/pause/Audit 0
193 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
194 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
196 TestJSONOutput/unpause/Command 0.65
197 TestJSONOutput/unpause/Audit 0
199 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
200 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
202 TestJSONOutput/stop/Command 5.79
203 TestJSONOutput/stop/Audit 0
205 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
206 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
207 TestErrorJSONOutput 0.25
209 TestKicCustomNetwork/create_custom_network 38.64
210 TestKicCustomNetwork/use_default_bridge_network 35.29
211 TestKicExistingNetwork 32.71
212 TestKicCustomSubnet 36.42
213 TestKicStaticIP 33.32
214 TestMainNoArgs 0.07
215 TestMinikubeProfile 67.76
218 TestMountStart/serial/StartWithMountFirst 5.95
219 TestMountStart/serial/VerifyMountFirst 0.26
220 TestMountStart/serial/StartWithMountSecond 6.76
221 TestMountStart/serial/VerifyMountSecond 0.25
222 TestMountStart/serial/DeleteFirst 1.61
223 TestMountStart/serial/VerifyMountPostDelete 0.25
224 TestMountStart/serial/Stop 1.2
225 TestMountStart/serial/RestartStopped 7.17
226 TestMountStart/serial/VerifyMountPostStop 0.28
229 TestMultiNode/serial/FreshStart2Nodes 63.95
231 TestMultiNode/serial/PingHostFrom2Pods 1.06
232 TestMultiNode/serial/AddNode 17.43
233 TestMultiNode/serial/MultiNodeLabels 0.1
234 TestMultiNode/serial/ProfileList 0.69
235 TestMultiNode/serial/CopyFile 9.98
236 TestMultiNode/serial/StopNode 2.25
237 TestMultiNode/serial/StartAfterStop 9.34
238 TestMultiNode/serial/RestartKeepsNodes 79.51
239 TestMultiNode/serial/DeleteNode 5.25
240 TestMultiNode/serial/StopMultiNode 23.99
241 TestMultiNode/serial/RestartMultiNode 49.66
242 TestMultiNode/serial/ValidateNameConflict 33.15
247 TestPreload 110.09
249 TestScheduledStopUnix 112.84
252 TestInsufficientStorage 10.27
253 TestRunningBinaryUpgrade 82.05
255 TestKubernetesUpgrade 347.22
256 TestMissingContainerUpgrade 177.13
258 TestNoKubernetes/serial/StartNoK8sWithVersion 0.11
259 TestNoKubernetes/serial/StartWithK8s 40.13
260 TestNoKubernetes/serial/StartWithStopK8s 17.73
261 TestNoKubernetes/serial/Start 5.48
262 TestNoKubernetes/serial/VerifyK8sNotRunning 0.27
263 TestNoKubernetes/serial/ProfileList 0.99
264 TestNoKubernetes/serial/Stop 1.23
265 TestNoKubernetes/serial/StartNoArgs 6.92
266 TestNoKubernetes/serial/VerifyK8sNotRunningSecond 0.3
267 TestStoppedBinaryUpgrade/Setup 0.68
268 TestStoppedBinaryUpgrade/Upgrade 107.81
269 TestStoppedBinaryUpgrade/MinikubeLogs 1.05
TestDownloadOnly/v1.20.0/json-events (6.85s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-036895 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-036895 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (6.845761053s)
--- PASS: TestDownloadOnly/v1.20.0/json-events (6.85s)
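The json-events subtest exercises minikube's machine-readable output: with -o=json, start writes one JSON event per line to stdout. As a rough sketch of how a consumer might read that stream (assuming a CloudEvents-style layout with "type" and "data" fields; the exact schema is not shown in this log), in Go:

// Minimal sketch: decode the line-delimited JSON events that
// `minikube start -o=json` writes to stdout. The "type"/"data" field
// names are assumptions about the event layout, not taken from this log.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os/exec"
)

func main() {
	cmd := exec.Command("out/minikube-linux-arm64", "start",
		"-o=json", "--download-only", "-p", "download-only-036895",
		"--kubernetes-version=v1.20.0", "--driver=docker",
		"--container-runtime=containerd")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		panic(err)
	}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	sc := bufio.NewScanner(stdout)
	for sc.Scan() {
		var ev map[string]any // one JSON object per line
		if err := json.Unmarshal(sc.Bytes(), &ev); err != nil {
			continue // skip any non-JSON noise
		}
		fmt.Printf("event type=%v data=%v\n", ev["type"], ev["data"])
	}
	_ = cmd.Wait()
}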

                                                
                                    
TestDownloadOnly/v1.20.0/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/preload-exists
I0317 10:25:30.466082    7572 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
I0317 10:25:30.466158    7572 preload.go:146] Found local preload: /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
--- PASS: TestDownloadOnly/v1.20.0/preload-exists (0.00s)
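The preload-exists check above reduces to a stat of a cached tarball whose file name encodes the preload version, Kubernetes version, container runtime, and architecture. A minimal illustrative Go version, with the path layout taken from the log lines (minikube's real implementation lives in preload.go):

// Illustrative only: reproduce the preload-exists check by building the
// cached tarball path and stat-ing it. The name layout comes from the
// Found local preload line above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func preloadExists(minikubeHome, preloadV, k8sV, runtime, arch string) bool {
	name := fmt.Sprintf("preloaded-images-k8s-%s-%s-%s-overlay2-%s.tar.lz4",
		preloadV, k8sV, runtime, arch)
	p := filepath.Join(minikubeHome, "cache", "preloaded-tarball", name)
	_, err := os.Stat(p)
	return err == nil
}

func main() {
	ok := preloadExists("/home/jenkins/minikube-integration/20535-2262/.minikube",
		"v18", "v1.20.0", "containerd", "arm64")
	fmt.Println("preload exists:", ok)
}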

                                                
                                    
TestDownloadOnly/v1.20.0/LogsDuration (0.1s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-036895
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-036895: exit status 85 (95.525473ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      | End Time |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	| start   | -o=json --download-only        | download-only-036895 | jenkins | v1.35.0 | 17 Mar 25 10:25 UTC |          |
	|         | -p download-only-036895        |                      |         |         |                     |          |
	|         | --force --alsologtostderr      |                      |         |         |                     |          |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|         | --driver=docker                |                      |         |         |                     |          |
	|         | --container-runtime=containerd |                      |         |         |                     |          |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|----------|
	
	
	==> Last Start <==
	Log file created at: 2025/03/17 10:25:23
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.24.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0317 10:25:23.667568    7577 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:25:23.667712    7577 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:25:23.667724    7577 out.go:358] Setting ErrFile to fd 2...
	I0317 10:25:23.667741    7577 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:25:23.668054    7577 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	W0317 10:25:23.668181    7577 root.go:314] Error reading config file at /home/jenkins/minikube-integration/20535-2262/.minikube/config/config.json: open /home/jenkins/minikube-integration/20535-2262/.minikube/config/config.json: no such file or directory
	I0317 10:25:23.668610    7577 out.go:352] Setting JSON to true
	I0317 10:25:23.669382    7577 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":469,"bootTime":1742206655,"procs":150,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 10:25:23.669446    7577 start.go:139] virtualization:  
	I0317 10:25:23.673289    7577 out.go:97] [download-only-036895] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	W0317 10:25:23.673483    7577 preload.go:293] Failed to list preload files: open /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball: no such file or directory
	I0317 10:25:23.673605    7577 notify.go:220] Checking for updates...
	I0317 10:25:23.677239    7577 out.go:169] MINIKUBE_LOCATION=20535
	I0317 10:25:23.680198    7577 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 10:25:23.683173    7577 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:25:23.686086    7577 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 10:25:23.688991    7577 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0317 10:25:23.694607    7577 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0317 10:25:23.694860    7577 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 10:25:23.722210    7577 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 10:25:23.722315    7577 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:25:24.140142    7577 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:28 OomKillDisable:true NGoroutines:53 SystemTime:2025-03-17 10:25:24.130760123 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:25:24.140252    7577 docker.go:318] overlay module found
	I0317 10:25:24.143358    7577 out.go:97] Using the docker driver based on user configuration
	I0317 10:25:24.143405    7577 start.go:297] selected driver: docker
	I0317 10:25:24.143418    7577 start.go:901] validating driver "docker" against <nil>
	I0317 10:25:24.143539    7577 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:25:24.199459    7577 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:28 OomKillDisable:true NGoroutines:53 SystemTime:2025-03-17 10:25:24.190988675 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:25:24.199616    7577 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0317 10:25:24.199910    7577 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0317 10:25:24.200084    7577 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0317 10:25:24.203103    7577 out.go:169] Using Docker driver with root privileges
	I0317 10:25:24.205900    7577 cni.go:84] Creating CNI manager for ""
	I0317 10:25:24.205961    7577 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0317 10:25:24.205973    7577 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0317 10:25:24.206057    7577 start.go:340] cluster config:
	{Name:download-only-036895 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:download-only-036895 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:25:24.208834    7577 out.go:97] Starting "download-only-036895" primary control-plane node in "download-only-036895" cluster
	I0317 10:25:24.208860    7577 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0317 10:25:24.211690    7577 out.go:97] Pulling base image v0.0.46-1741860993-20523 ...
	I0317 10:25:24.211728    7577 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0317 10:25:24.211801    7577 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon
	I0317 10:25:24.228203    7577 cache.go:150] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 to local cache
	I0317 10:25:24.228402    7577 image.go:65] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local cache directory
	I0317 10:25:24.228509    7577 image.go:150] Writing gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 to local cache
	I0317 10:25:24.275343    7577 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	I0317 10:25:24.275376    7577 cache.go:56] Caching tarball of preloaded images
	I0317 10:25:24.275533    7577 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime containerd
	I0317 10:25:24.278832    7577 out.go:97] Downloading Kubernetes v1.20.0 preload ...
	I0317 10:25:24.278853    7577 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4 ...
	I0317 10:25:24.410280    7577 download.go:108] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4?checksum=md5:7e3d48ccb9f143791669d02e14ce1643 -> /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.20.0-containerd-overlay2-arm64.tar.lz4
	
	
	* The control-plane node download-only-036895 host does not exist
	  To start a cluster, run: "minikube start -p download-only-036895"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.20.0/LogsDuration (0.10s)
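The subtest passes despite the non-zero exit: a download-only profile never creates a host (see the stdout above), so `minikube logs` is expected to fail here. A small Go sketch of how a test can run the command and recover that exit code:

// Sketch: run `minikube logs -p <profile>` and extract its exit status,
// mirroring how this subtest tolerates the expected status 85 when the
// profile's host was never created.
package main

import (
	"errors"
	"fmt"
	"os/exec"
)

func main() {
	out, err := exec.Command("out/minikube-linux-arm64",
		"logs", "-p", "download-only-036895").CombinedOutput()
	var ee *exec.ExitError
	if errors.As(err, &ee) {
		fmt.Printf("exit status %d (anticipated for a host that was never created)\n",
			ee.ExitCode())
	} else if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}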

                                                
                                    
TestDownloadOnly/v1.20.0/DeleteAll (0.22s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.20.0/DeleteAll (0.22s)

                                                
                                    
TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.14s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-036895
--- PASS: TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.14s)

                                                
                                    
TestDownloadOnly/v1.32.2/json-events (6.1s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -o=json --download-only -p download-only-849191 --force --alsologtostderr --kubernetes-version=v1.32.2 --container-runtime=containerd --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -o=json --download-only -p download-only-849191 --force --alsologtostderr --kubernetes-version=v1.32.2 --container-runtime=containerd --driver=docker  --container-runtime=containerd: (6.09595333s)
--- PASS: TestDownloadOnly/v1.32.2/json-events (6.10s)

                                                
                                    
TestDownloadOnly/v1.32.2/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/preload-exists
I0317 10:25:37.016125    7572 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
I0317 10:25:37.016171    7572 preload.go:146] Found local preload: /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4
--- PASS: TestDownloadOnly/v1.32.2/preload-exists (0.00s)

                                                
                                    
TestDownloadOnly/v1.32.2/LogsDuration (0.09s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-linux-arm64 logs -p download-only-849191
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-linux-arm64 logs -p download-only-849191: exit status 85 (92.422824ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| Command |              Args              |       Profile        |  User   | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	| start   | -o=json --download-only        | download-only-036895 | jenkins | v1.35.0 | 17 Mar 25 10:25 UTC |                     |
	|         | -p download-only-036895        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.20.0   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	| delete  | --all                          | minikube             | jenkins | v1.35.0 | 17 Mar 25 10:25 UTC | 17 Mar 25 10:25 UTC |
	| delete  | -p download-only-036895        | download-only-036895 | jenkins | v1.35.0 | 17 Mar 25 10:25 UTC | 17 Mar 25 10:25 UTC |
	| start   | -o=json --download-only        | download-only-849191 | jenkins | v1.35.0 | 17 Mar 25 10:25 UTC |                     |
	|         | -p download-only-849191        |                      |         |         |                     |                     |
	|         | --force --alsologtostderr      |                      |         |         |                     |                     |
	|         | --kubernetes-version=v1.32.2   |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|         | --driver=docker                |                      |         |         |                     |                     |
	|         | --container-runtime=containerd |                      |         |         |                     |                     |
	|---------|--------------------------------|----------------------|---------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/03/17 10:25:30
	Running on machine: ip-172-31-30-239
	Binary: Built with gc go1.24.0 for linux/arm64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0317 10:25:30.964721    7774 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:25:30.964869    7774 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:25:30.964879    7774 out.go:358] Setting ErrFile to fd 2...
	I0317 10:25:30.964885    7774 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:25:30.965173    7774 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:25:30.965611    7774 out.go:352] Setting JSON to true
	I0317 10:25:30.966381    7774 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":476,"bootTime":1742206655,"procs":146,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 10:25:30.966444    7774 start.go:139] virtualization:  
	I0317 10:25:30.969866    7774 out.go:97] [download-only-849191] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	I0317 10:25:30.970118    7774 notify.go:220] Checking for updates...
	I0317 10:25:30.973071    7774 out.go:169] MINIKUBE_LOCATION=20535
	I0317 10:25:30.976025    7774 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 10:25:30.979038    7774 out.go:169] KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:25:30.981859    7774 out.go:169] MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 10:25:30.984739    7774 out.go:169] MINIKUBE_BIN=out/minikube-linux-arm64
	W0317 10:25:30.990675    7774 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0317 10:25:30.990959    7774 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 10:25:31.025134    7774 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 10:25:31.025238    7774 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:25:31.083474    7774 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:27 OomKillDisable:true NGoroutines:44 SystemTime:2025-03-17 10:25:31.074576939 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:25:31.083599    7774 docker.go:318] overlay module found
	I0317 10:25:31.086615    7774 out.go:97] Using the docker driver based on user configuration
	I0317 10:25:31.086661    7774 start.go:297] selected driver: docker
	I0317 10:25:31.086678    7774 start.go:901] validating driver "docker" against <nil>
	I0317 10:25:31.086790    7774 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:25:31.152795    7774 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:0 ContainersRunning:0 ContainersPaused:0 ContainersStopped:0 Images:1 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:27 OomKillDisable:true NGoroutines:44 SystemTime:2025-03-17 10:25:31.144101499 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:25:31.152971    7774 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0317 10:25:31.153242    7774 start_flags.go:393] Using suggested 2200MB memory alloc based on sys=7834MB, container=7834MB
	I0317 10:25:31.153390    7774 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0317 10:25:31.156485    7774 out.go:169] Using Docker driver with root privileges
	I0317 10:25:31.159319    7774 cni.go:84] Creating CNI manager for ""
	I0317 10:25:31.159392    7774 cni.go:143] "docker" driver + "containerd" runtime found, recommending kindnet
	I0317 10:25:31.159413    7774 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0317 10:25:31.159503    7774 start.go:340] cluster config:
	{Name:download-only-849191 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:2200 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:download-only-849191 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:25:31.162579    7774 out.go:97] Starting "download-only-849191" primary control-plane node in "download-only-849191" cluster
	I0317 10:25:31.162600    7774 cache.go:121] Beginning downloading kic base image for docker with containerd
	I0317 10:25:31.165418    7774 out.go:97] Pulling base image v0.0.46-1741860993-20523 ...
	I0317 10:25:31.165445    7774 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:25:31.165602    7774 image.go:81] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local docker daemon
	I0317 10:25:31.182136    7774 cache.go:150] Downloading gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 to local cache
	I0317 10:25:31.182260    7774 image.go:65] Checking for gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local cache directory
	I0317 10:25:31.182285    7774 image.go:68] Found gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 in local cache directory, skipping pull
	I0317 10:25:31.182290    7774 image.go:137] gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 exists in cache, skipping pull
	I0317 10:25:31.182303    7774 cache.go:153] successfully saved gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 as a tarball
	I0317 10:25:31.219689    7774 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.32.2/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4
	I0317 10:25:31.219709    7774 cache.go:56] Caching tarball of preloaded images
	I0317 10:25:31.219855    7774 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime containerd
	I0317 10:25:31.222925    7774 out.go:97] Downloading Kubernetes v1.32.2 preload ...
	I0317 10:25:31.222954    7774 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4 ...
	I0317 10:25:31.303533    7774 download.go:108] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.32.2/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4?checksum=md5:e856099fc6207e912f24bef3ed2763e0 -> /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4
	I0317 10:25:35.505207    7774 preload.go:247] saving checksum for preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4 ...
	I0317 10:25:35.505308    7774 preload.go:254] verifying checksum of /home/jenkins/minikube-integration/20535-2262/.minikube/cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4 ...
	
	
	* The control-plane node download-only-849191 host does not exist
	  To start a cluster, run: "minikube start -p download-only-849191"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.32.2/LogsDuration (0.09s)
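The download in the log above carries its expected digest in the URL (?checksum=md5:...), and preload.go then saves and verifies the tarball's checksum. A self-contained Go sketch of that verification step, using the path and digest from this run:

// Sketch of the post-download step logged above: compute the cached
// tarball's MD5 and compare it to the digest from the download URL.
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

func main() {
	const want = "e856099fc6207e912f24bef3ed2763e0"
	f, err := os.Open("/home/jenkins/minikube-integration/20535-2262/.minikube/" +
		"cache/preloaded-tarball/preloaded-images-k8s-v18-v1.32.2-containerd-overlay2-arm64.tar.lz4")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	h := md5.New()
	if _, err := io.Copy(h, f); err != nil {
		panic(err)
	}
	got := hex.EncodeToString(h.Sum(nil))
	fmt.Println("checksum ok:", got == want)
}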

                                                
                                    
TestDownloadOnly/v1.32.2/DeleteAll (0.23s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-linux-arm64 delete --all
--- PASS: TestDownloadOnly/v1.32.2/DeleteAll (0.23s)

                                                
                                    
TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds (0.15s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-linux-arm64 delete -p download-only-849191
--- PASS: TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds (0.15s)

                                                
                                    
TestBinaryMirror (0.59s)

                                                
                                                
=== RUN   TestBinaryMirror
I0317 10:25:38.331425    7572 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/arm64/kubectl?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/arm64/kubectl.sha256
aaa_download_only_test.go:314: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p binary-mirror-067613 --alsologtostderr --binary-mirror http://127.0.0.1:38021 --driver=docker  --container-runtime=containerd
helpers_test.go:175: Cleaning up "binary-mirror-067613" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p binary-mirror-067613
--- PASS: TestBinaryMirror (0.59s)
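TestBinaryMirror points --binary-mirror at a local HTTP endpoint standing in for dl.k8s.io, so kubectl is fetched from release/<version>/bin/<os>/<arch>/ paths like the one logged above. A minimal sketch of such a mirror (the ./mirror root directory is a placeholder; the port matches this run's flag):

// Minimal static file server acting as a binary mirror. The directory
// tree under ./mirror is assumed to replicate dl.k8s.io's layout, e.g.
// release/v1.32.2/bin/linux/arm64/kubectl.
package main

import (
	"log"
	"net/http"
)

func main() {
	http.Handle("/", http.FileServer(http.Dir("./mirror")))
	log.Fatal(http.ListenAndServe("127.0.0.1:38021", nil))
}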

                                                
                                    
TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.07s)

                                                
                                                
=== RUN   TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/EnablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
addons_test.go:939: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-574058
addons_test.go:939: (dbg) Non-zero exit: out/minikube-linux-arm64 addons enable dashboard -p addons-574058: exit status 85 (72.458236ms)

                                                
                                                
-- stdout --
	* Profile "addons-574058" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-574058"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.07s)

                                                
                                    
TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.08s)

                                                
                                                
=== RUN   TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/DisablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
addons_test.go:950: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-574058
addons_test.go:950: (dbg) Non-zero exit: out/minikube-linux-arm64 addons disable dashboard -p addons-574058: exit status 85 (75.314723ms)

                                                
                                                
-- stdout --
	* Profile "addons-574058" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-574058"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.08s)

                                                
                                    
TestAddons/Setup (218.21s)

                                                
                                                
=== RUN   TestAddons/Setup
addons_test.go:107: (dbg) Run:  out/minikube-linux-arm64 start -p addons-574058 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher
addons_test.go:107: (dbg) Done: out/minikube-linux-arm64 start -p addons-574058 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=docker  --container-runtime=containerd --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher: (3m38.209354307s)
--- PASS: TestAddons/Setup (218.21s)

                                                
                                    
TestAddons/serial/Volcano (41.3s)

                                                
                                                
=== RUN   TestAddons/serial/Volcano
addons_test.go:823: volcano-controller stabilized in 60.109867ms
addons_test.go:815: volcano-admission stabilized in 61.557383ms
addons_test.go:807: volcano-scheduler stabilized in 61.982146ms
addons_test.go:829: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-scheduler" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-scheduler-75fdd99bcf-scm4b" [6ba61588-f6fe-4a3e-a930-1ce7d314c2c0] Running
addons_test.go:829: (dbg) TestAddons/serial/Volcano: app=volcano-scheduler healthy within 6.002929931s
addons_test.go:833: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-admission" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-admission-75d8f6b5c-kh4vr" [9db3ac8d-2787-4a06-9447-eadfd265d538] Running
addons_test.go:833: (dbg) TestAddons/serial/Volcano: app=volcano-admission healthy within 5.003470828s
addons_test.go:837: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-controller" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-controllers-86bdc5c9c-nh74f" [6cbce0a0-dc6e-40b6-b3f2-b38186b92366] Running
addons_test.go:837: (dbg) TestAddons/serial/Volcano: app=volcano-controller healthy within 6.003784108s
addons_test.go:842: (dbg) Run:  kubectl --context addons-574058 delete -n volcano-system job volcano-admission-init
addons_test.go:848: (dbg) Run:  kubectl --context addons-574058 create -f testdata/vcjob.yaml
addons_test.go:856: (dbg) Run:  kubectl --context addons-574058 get vcjob -n my-volcano
addons_test.go:874: (dbg) TestAddons/serial/Volcano: waiting 3m0s for pods matching "volcano.sh/job-name=test-job" in namespace "my-volcano" ...
helpers_test.go:344: "test-job-nginx-0" [6946a7c5-3695-4d0b-a94a-8110bd54bbb7] Pending
helpers_test.go:344: "test-job-nginx-0" [6946a7c5-3695-4d0b-a94a-8110bd54bbb7] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "test-job-nginx-0" [6946a7c5-3695-4d0b-a94a-8110bd54bbb7] Running
addons_test.go:874: (dbg) TestAddons/serial/Volcano: volcano.sh/job-name=test-job healthy within 12.003325275s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable volcano --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable volcano --alsologtostderr -v=1: (11.621247231s)
--- PASS: TestAddons/serial/Volcano (41.30s)
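Most of the assertions above follow one pattern: wait, with a deadline, until every pod matching a label selector is healthy. A hedged sketch of that pattern, shelling out to `kubectl wait` with values from this Volcano run (an approximation of the helpers_test.go behavior, not the actual implementation):

// Wait until all pods matching a selector report Ready, or time out.
package main

import (
	"fmt"
	"os/exec"
)

func waitForPods(kubeContext, ns, selector, timeout string) error {
	cmd := exec.Command("kubectl", "--context", kubeContext,
		"wait", "--for=condition=Ready", "pod",
		"-l", selector, "-n", ns, "--timeout="+timeout)
	out, err := cmd.CombinedOutput()
	fmt.Print(string(out))
	return err
}

func main() {
	if err := waitForPods("addons-574058", "volcano-system",
		"app=volcano-scheduler", "6m"); err != nil {
		panic(err)
	}
}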

                                                
                                    
TestAddons/serial/GCPAuth/Namespaces (0.19s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/Namespaces
addons_test.go:569: (dbg) Run:  kubectl --context addons-574058 create ns new-namespace
addons_test.go:583: (dbg) Run:  kubectl --context addons-574058 get secret gcp-auth -n new-namespace
--- PASS: TestAddons/serial/GCPAuth/Namespaces (0.19s)

                                                
                                    
TestAddons/serial/GCPAuth/FakeCredentials (9.87s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/FakeCredentials
addons_test.go:614: (dbg) Run:  kubectl --context addons-574058 create -f testdata/busybox.yaml
addons_test.go:621: (dbg) Run:  kubectl --context addons-574058 create sa gcp-auth-test
addons_test.go:627: (dbg) TestAddons/serial/GCPAuth/FakeCredentials: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [8310a262-5e53-408d-a181-a5bd08c7441f] Pending
helpers_test.go:344: "busybox" [8310a262-5e53-408d-a181-a5bd08c7441f] Running
addons_test.go:627: (dbg) TestAddons/serial/GCPAuth/FakeCredentials: integration-test=busybox healthy within 9.003756864s
addons_test.go:633: (dbg) Run:  kubectl --context addons-574058 exec busybox -- /bin/sh -c "printenv GOOGLE_APPLICATION_CREDENTIALS"
addons_test.go:645: (dbg) Run:  kubectl --context addons-574058 describe sa gcp-auth-test
addons_test.go:659: (dbg) Run:  kubectl --context addons-574058 exec busybox -- /bin/sh -c "cat /google-app-creds.json"
addons_test.go:683: (dbg) Run:  kubectl --context addons-574058 exec busybox -- /bin/sh -c "printenv GOOGLE_CLOUD_PROJECT"
--- PASS: TestAddons/serial/GCPAuth/FakeCredentials (9.87s)
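The FakeCredentials assertions reduce to exec-ing into the busybox pod and reading what the gcp-auth webhook is expected to inject: an env var pointing at a mounted credentials file. A small Go sketch mirroring the kubectl invocations in the log:

// Exec into the test pod and read the injected credential env var and
// the mounted creds file, as the assertions above do.
package main

import (
	"fmt"
	"os/exec"
)

func podExec(kubeContext, pod string, args ...string) (string, error) {
	base := []string{"--context", kubeContext, "exec", pod, "--"}
	out, err := exec.Command("kubectl", append(base, args...)...).CombinedOutput()
	return string(out), err
}

func main() {
	env, err := podExec("addons-574058", "busybox",
		"printenv", "GOOGLE_APPLICATION_CREDENTIALS")
	if err != nil {
		panic(err)
	}
	fmt.Print("credentials path: ", env)
	creds, err := podExec("addons-574058", "busybox",
		"cat", "/google-app-creds.json")
	if err != nil {
		panic(err)
	}
	fmt.Println("creds file bytes:", len(creds))
}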

                                                
                                    
TestAddons/parallel/Registry (15.98s)

                                                
                                                
=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Registry
addons_test.go:321: registry stabilized in 3.142417ms
addons_test.go:323: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-6c88467877-jsv7x" [b98db6af-b2ec-412e-9660-6901b1778d89] Running
addons_test.go:323: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 6.003143586s
addons_test.go:326: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-proxy-pjjcr" [149398c5-11b1-41f9-9be4-313bf28b1e28] Running
addons_test.go:326: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 5.00411271s
addons_test.go:331: (dbg) Run:  kubectl --context addons-574058 delete po -l run=registry-test --now
addons_test.go:336: (dbg) Run:  kubectl --context addons-574058 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"
addons_test.go:336: (dbg) Done: kubectl --context addons-574058 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": (3.971045883s)
addons_test.go:350: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 ip
2025/03/17 10:30:32 [DEBUG] GET http://192.168.49.2:5000
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable registry --alsologtostderr -v=1
--- PASS: TestAddons/parallel/Registry (15.98s)
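The in-cluster check above is a plain HTTP reachability probe (wget --spider) against the registry Service DNS name; the test then resolves the node address, and the debug line shows a GET against 192.168.49.2:5000. An equivalent out-of-cluster probe in Go:

// Probe the registry endpoint resolved by the test, the rough
// out-of-cluster counterpart of the in-cluster `wget --spider`.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 5 * time.Second}
	resp, err := client.Get("http://192.168.49.2:5000")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("registry answered with", resp.Status)
}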

                                                
                                    
TestAddons/parallel/Ingress (21.27s)

                                                
                                                
=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Ingress
addons_test.go:207: (dbg) Run:  kubectl --context addons-574058 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:232: (dbg) Run:  kubectl --context addons-574058 replace --force -f testdata/nginx-ingress-v1.yaml
addons_test.go:245: (dbg) Run:  kubectl --context addons-574058 replace --force -f testdata/nginx-pod-svc.yaml
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: waiting 8m0s for pods matching "run=nginx" in namespace "default" ...
helpers_test.go:344: "nginx" [38ec9eaa-ff0f-4ea3-90a4-e1eba556a226] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx" [38ec9eaa-ff0f-4ea3-90a4-e1eba556a226] Running
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: run=nginx healthy within 9.00272599s
I0317 10:31:02.275100    7572 kapi.go:150] Service nginx in namespace default found.
addons_test.go:262: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:286: (dbg) Run:  kubectl --context addons-574058 replace --force -f testdata/ingress-dns-example-v1.yaml
addons_test.go:291: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 ip
addons_test.go:297: (dbg) Run:  nslookup hello-john.test 192.168.49.2
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable ingress-dns --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable ingress-dns --alsologtostderr -v=1: (2.212696117s)
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable ingress --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable ingress --alsologtostderr -v=1: (7.997844899s)
--- PASS: TestAddons/parallel/Ingress (21.27s)
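
To replay the ingress check outside the harness (a sketch; nginx.example.com is the host rule from the test's Ingress manifest, and minikube here stands for the out/minikube-linux-arm64 binary under test):

	# Route the request through the ingress controller by spoofing the Host header.
	minikube -p addons-574058 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"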

TestAddons/parallel/InspektorGadget (11.81s)

=== RUN   TestAddons/parallel/InspektorGadget
=== PAUSE TestAddons/parallel/InspektorGadget
=== CONT  TestAddons/parallel/InspektorGadget
addons_test.go:762: (dbg) TestAddons/parallel/InspektorGadget: waiting 8m0s for pods matching "k8s-app=gadget" in namespace "gadget" ...
helpers_test.go:344: "gadget-hdzcb" [f75d747b-ed88-482b-99d8-86ce7a4baade] Running
addons_test.go:762: (dbg) TestAddons/parallel/InspektorGadget: k8s-app=gadget healthy within 6.003897044s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable inspektor-gadget --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable inspektor-gadget --alsologtostderr -v=1: (5.806608601s)
--- PASS: TestAddons/parallel/InspektorGadget (11.81s)

TestAddons/parallel/MetricsServer (5.96s)

=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer
=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:394: metrics-server stabilized in 6.452892ms
addons_test.go:396: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...
helpers_test.go:344: "metrics-server-7fbb699795-h4g8q" [4b7a6c33-b489-45ea-8f7a-8500129a3677] Running
addons_test.go:396: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 5.004632944s
addons_test.go:402: (dbg) Run:  kubectl --context addons-574058 top pods -n kube-system
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable metrics-server --alsologtostderr -v=1
--- PASS: TestAddons/parallel/MetricsServer (5.96s)

TestAddons/parallel/CSI (37.84s)

=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI
=== CONT  TestAddons/parallel/CSI
I0317 10:30:33.650914    7572 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
I0317 10:30:33.656840    7572 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I0317 10:30:33.656865    7572 kapi.go:107] duration metric: took 8.724127ms to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
addons_test.go:488: csi-hostpath-driver pods stabilized in 8.734268ms
addons_test.go:491: (dbg) Run:  kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pvc.yaml
addons_test.go:496: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc -o jsonpath={.status.phase} -n default
addons_test.go:501: (dbg) Run:  kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pv-pod.yaml
addons_test.go:506: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod" in namespace "default" ...
helpers_test.go:344: "task-pv-pod" [fae76f62-f596-4f10-b0f8-f6e46202116a] Pending
helpers_test.go:344: "task-pv-pod" [fae76f62-f596-4f10-b0f8-f6e46202116a] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod" [fae76f62-f596-4f10-b0f8-f6e46202116a] Running
addons_test.go:506: (dbg) TestAddons/parallel/CSI: app=task-pv-pod healthy within 7.003524938s
addons_test.go:511: (dbg) Run:  kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/snapshot.yaml
addons_test.go:516: (dbg) TestAddons/parallel/CSI: waiting 6m0s for volume snapshot "new-snapshot-demo" in namespace "default" ...
helpers_test.go:419: (dbg) Run:  kubectl --context addons-574058 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
helpers_test.go:419: (dbg) Run:  kubectl --context addons-574058 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
addons_test.go:521: (dbg) Run:  kubectl --context addons-574058 delete pod task-pv-pod
addons_test.go:521: (dbg) Done: kubectl --context addons-574058 delete pod task-pv-pod: (1.29383788s)
addons_test.go:527: (dbg) Run:  kubectl --context addons-574058 delete pvc hpvc
addons_test.go:533: (dbg) Run:  kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pvc-restore.yaml
addons_test.go:538: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc-restore" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
addons_test.go:543: (dbg) Run:  kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pv-pod-restore.yaml
addons_test.go:548: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod-restore" in namespace "default" ...
helpers_test.go:344: "task-pv-pod-restore" [c21b49dc-a5d6-4d28-9e68-b71db0e0ed60] Pending
helpers_test.go:344: "task-pv-pod-restore" [c21b49dc-a5d6-4d28-9e68-b71db0e0ed60] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod-restore" [c21b49dc-a5d6-4d28-9e68-b71db0e0ed60] Running
addons_test.go:548: (dbg) TestAddons/parallel/CSI: app=task-pv-pod-restore healthy within 8.004550591s
addons_test.go:553: (dbg) Run:  kubectl --context addons-574058 delete pod task-pv-pod-restore
addons_test.go:553: (dbg) Done: kubectl --context addons-574058 delete pod task-pv-pod-restore: (1.343027581s)
addons_test.go:557: (dbg) Run:  kubectl --context addons-574058 delete pvc hpvc-restore
addons_test.go:561: (dbg) Run:  kubectl --context addons-574058 delete volumesnapshot new-snapshot-demo
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable volumesnapshots --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable volumesnapshots --alsologtostderr -v=1: (1.128635309s)
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable csi-hostpath-driver --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable csi-hostpath-driver --alsologtostderr -v=1: (6.833599969s)
--- PASS: TestAddons/parallel/CSI (37.84s)
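
The snapshot/restore sequence above is replayable by hand; a sketch assuming minikube's test/integration directory as the working directory (that is where the testdata manifests live) and the csi-hostpath-driver and volumesnapshots addons enabled:

	kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pvc.yaml        # claim (hpvc)
	kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pv-pod.yaml     # pod writing to the claim
	kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/snapshot.yaml   # VolumeSnapshot of hpvc
	kubectl --context addons-574058 delete pod task-pv-pod
	kubectl --context addons-574058 delete pvc hpvc
	kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pvc-restore.yaml    # new claim from the snapshot
	kubectl --context addons-574058 create -f testdata/csi-hostpath-driver/pv-pod-restore.yaml # pod over the restored claim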

TestAddons/parallel/Headlamp (17.11s)

=== RUN   TestAddons/parallel/Headlamp
=== PAUSE TestAddons/parallel/Headlamp
=== CONT  TestAddons/parallel/Headlamp
addons_test.go:747: (dbg) Run:  out/minikube-linux-arm64 addons enable headlamp -p addons-574058 --alsologtostderr -v=1
addons_test.go:747: (dbg) Done: out/minikube-linux-arm64 addons enable headlamp -p addons-574058 --alsologtostderr -v=1: (1.065607127s)
addons_test.go:752: (dbg) TestAddons/parallel/Headlamp: waiting 8m0s for pods matching "app.kubernetes.io/name=headlamp" in namespace "headlamp" ...
helpers_test.go:344: "headlamp-5d4b5d7bd6-5bzmf" [a38b00c8-5352-4f86-8bab-ca176f733268] Pending / Ready:ContainersNotReady (containers with unready status: [headlamp]) / ContainersReady:ContainersNotReady (containers with unready status: [headlamp])
helpers_test.go:344: "headlamp-5d4b5d7bd6-5bzmf" [a38b00c8-5352-4f86-8bab-ca176f733268] Running
addons_test.go:752: (dbg) TestAddons/parallel/Headlamp: app.kubernetes.io/name=headlamp healthy within 10.003850224s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable headlamp --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable headlamp --alsologtostderr -v=1: (6.041476089s)
--- PASS: TestAddons/parallel/Headlamp (17.11s)

TestAddons/parallel/CloudSpanner (5.55s)

=== RUN   TestAddons/parallel/CloudSpanner
=== PAUSE TestAddons/parallel/CloudSpanner
=== CONT  TestAddons/parallel/CloudSpanner
addons_test.go:779: (dbg) TestAddons/parallel/CloudSpanner: waiting 6m0s for pods matching "app=cloud-spanner-emulator" in namespace "default" ...
helpers_test.go:344: "cloud-spanner-emulator-754dc876cd-n5qwc" [72756425-8368-4b38-99b5-1fbf0c7c1776] Running
addons_test.go:779: (dbg) TestAddons/parallel/CloudSpanner: app=cloud-spanner-emulator healthy within 5.003710804s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable cloud-spanner --alsologtostderr -v=1
--- PASS: TestAddons/parallel/CloudSpanner (5.55s)

TestAddons/parallel/LocalPath (8.52s)

=== RUN   TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath
=== CONT  TestAddons/parallel/LocalPath
addons_test.go:888: (dbg) Run:  kubectl --context addons-574058 apply -f testdata/storage-provisioner-rancher/pvc.yaml
addons_test.go:894: (dbg) Run:  kubectl --context addons-574058 apply -f testdata/storage-provisioner-rancher/pod.yaml
addons_test.go:898: (dbg) TestAddons/parallel/LocalPath: waiting 5m0s for pvc "test-pvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o jsonpath={.status.phase} -n default
addons_test.go:901: (dbg) TestAddons/parallel/LocalPath: waiting 3m0s for pods matching "run=test-local-path" in namespace "default" ...
helpers_test.go:344: "test-local-path" [3d5f01a8-2a04-4a3b-b754-050cd3546231] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "test-local-path" [3d5f01a8-2a04-4a3b-b754-050cd3546231] Pending / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
helpers_test.go:344: "test-local-path" [3d5f01a8-2a04-4a3b-b754-050cd3546231] Succeeded / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
addons_test.go:901: (dbg) TestAddons/parallel/LocalPath: run=test-local-path healthy within 3.003610803s
addons_test.go:906: (dbg) Run:  kubectl --context addons-574058 get pvc test-pvc -o=json
addons_test.go:915: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 ssh "cat /opt/local-path-provisioner/pvc-d086f209-b13e-4313-a713-a728cc04d061_default_test-pvc/file1"
addons_test.go:927: (dbg) Run:  kubectl --context addons-574058 delete pod test-local-path
addons_test.go:931: (dbg) Run:  kubectl --context addons-574058 delete pvc test-pvc
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable storage-provisioner-rancher --alsologtostderr -v=1
--- PASS: TestAddons/parallel/LocalPath (8.52s)
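
A sketch of the same data-path check (assumes the storage-provisioner-rancher addon and minikube's testdata manifests; the pvc-<uid> directory under /opt/local-path-provisioner is generated per run, so list the root instead of hard-coding the path from the log above):

	kubectl --context addons-574058 apply -f testdata/storage-provisioner-rancher/pvc.yaml
	kubectl --context addons-574058 apply -f testdata/storage-provisioner-rancher/pod.yaml
	# Once the pod completes, its written file should be visible on the node:
	minikube -p addons-574058 ssh "ls /opt/local-path-provisioner/"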

TestAddons/parallel/NvidiaDevicePlugin (6.49s)

=== RUN   TestAddons/parallel/NvidiaDevicePlugin
=== PAUSE TestAddons/parallel/NvidiaDevicePlugin
=== CONT  TestAddons/parallel/NvidiaDevicePlugin
addons_test.go:964: (dbg) TestAddons/parallel/NvidiaDevicePlugin: waiting 6m0s for pods matching "name=nvidia-device-plugin-ds" in namespace "kube-system" ...
helpers_test.go:344: "nvidia-device-plugin-daemonset-k4x9n" [9defd984-8f7b-4987-9a6a-4267137a42e4] Running
addons_test.go:964: (dbg) TestAddons/parallel/NvidiaDevicePlugin: name=nvidia-device-plugin-ds healthy within 6.003340576s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable nvidia-device-plugin --alsologtostderr -v=1
--- PASS: TestAddons/parallel/NvidiaDevicePlugin (6.49s)

TestAddons/parallel/Yakd (11.91s)

=== RUN   TestAddons/parallel/Yakd
=== PAUSE TestAddons/parallel/Yakd
=== CONT  TestAddons/parallel/Yakd
addons_test.go:986: (dbg) TestAddons/parallel/Yakd: waiting 2m0s for pods matching "app.kubernetes.io/name=yakd-dashboard" in namespace "yakd-dashboard" ...
helpers_test.go:344: "yakd-dashboard-575dd5996b-8xrg8" [2d35b9f9-0de0-44ad-a530-b1947ce09873] Running
addons_test.go:986: (dbg) TestAddons/parallel/Yakd: app.kubernetes.io/name=yakd-dashboard healthy within 6.007783691s
addons_test.go:992: (dbg) Run:  out/minikube-linux-arm64 -p addons-574058 addons disable yakd --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-linux-arm64 -p addons-574058 addons disable yakd --alsologtostderr -v=1: (5.905509127s)
--- PASS: TestAddons/parallel/Yakd (11.91s)

TestAddons/StoppedEnableDisable (12.31s)

=== RUN   TestAddons/StoppedEnableDisable
addons_test.go:170: (dbg) Run:  out/minikube-linux-arm64 stop -p addons-574058
addons_test.go:170: (dbg) Done: out/minikube-linux-arm64 stop -p addons-574058: (12.035378693s)
addons_test.go:174: (dbg) Run:  out/minikube-linux-arm64 addons enable dashboard -p addons-574058
addons_test.go:178: (dbg) Run:  out/minikube-linux-arm64 addons disable dashboard -p addons-574058
addons_test.go:183: (dbg) Run:  out/minikube-linux-arm64 addons disable gvisor -p addons-574058
--- PASS: TestAddons/StoppedEnableDisable (12.31s)

TestCertOptions (31.48s)

=== RUN   TestCertOptions
=== PAUSE TestCertOptions
=== CONT  TestCertOptions
cert_options_test.go:49: (dbg) Run:  out/minikube-linux-arm64 start -p cert-options-469914 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd
E0317 11:24:00.312997    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
cert_options_test.go:49: (dbg) Done: out/minikube-linux-arm64 start -p cert-options-469914 --memory=2048 --apiserver-ips=127.0.0.1 --apiserver-ips=192.168.15.15 --apiserver-names=localhost --apiserver-names=www.google.com --apiserver-port=8555 --driver=docker  --container-runtime=containerd: (28.832345794s)
cert_options_test.go:60: (dbg) Run:  out/minikube-linux-arm64 -p cert-options-469914 ssh "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt"
cert_options_test.go:88: (dbg) Run:  kubectl --context cert-options-469914 config view
cert_options_test.go:100: (dbg) Run:  out/minikube-linux-arm64 ssh -p cert-options-469914 -- "sudo cat /etc/kubernetes/admin.conf"
helpers_test.go:175: Cleaning up "cert-options-469914" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-options-469914
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-options-469914: (2.001599577s)
--- PASS: TestCertOptions (31.48s)
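
To inspect the resulting apiserver certificate by hand (a sketch; the grep filter is an assumption about openssl's SAN output formatting, not something the test prints):

	minikube -p cert-options-469914 ssh \
	  "openssl x509 -text -noout -in /var/lib/minikube/certs/apiserver.crt" \
	  | grep -A1 'Subject Alternative Name'
	kubectl --context cert-options-469914 config view   # the server: URL should end in :8555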

TestCertExpiration (220.02s)

=== RUN   TestCertExpiration
=== PAUSE TestCertExpiration
=== CONT  TestCertExpiration
cert_options_test.go:123: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-111228 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd
E0317 11:20:19.262418    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
cert_options_test.go:123: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-111228 --memory=2048 --cert-expiration=3m --driver=docker  --container-runtime=containerd: (30.667514851s)
cert_options_test.go:131: (dbg) Run:  out/minikube-linux-arm64 start -p cert-expiration-111228 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd
cert_options_test.go:131: (dbg) Done: out/minikube-linux-arm64 start -p cert-expiration-111228 --memory=2048 --cert-expiration=8760h --driver=docker  --container-runtime=containerd: (7.303062097s)
helpers_test.go:175: Cleaning up "cert-expiration-111228" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p cert-expiration-111228
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p cert-expiration-111228: (2.052219131s)
--- PASS: TestCertExpiration (220.02s)
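
The rotation being exercised, condensed (a sketch using this run's flags; the sleep is an assumption standing in for the harness waiting out the 3m TTL between the two starts):

	minikube start -p cert-expiration-111228 --memory=2048 --cert-expiration=3m \
	  --driver=docker --container-runtime=containerd
	sleep 180   # let the short-lived certificates expire
	# Restarting with a longer TTL must regenerate the expired certs:
	minikube start -p cert-expiration-111228 --memory=2048 --cert-expiration=8760h \
	  --driver=docker --container-runtime=containerd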

TestForceSystemdFlag (35.11s)

=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag
=== CONT  TestForceSystemdFlag
docker_test.go:91: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-flag-050260 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:91: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-flag-050260 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (32.80185264s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-flag-050260 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-flag-050260" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-flag-050260
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-flag-050260: (2.003130146s)
--- PASS: TestForceSystemdFlag (35.11s)
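
Roughly what the test asserts: with --force-systemd, the generated containerd config selects the systemd cgroup driver. A hand-check sketch (grepping for SystemdCgroup is an assumption about the containerd config key; the test only cats the file):

	minikube start -p force-systemd-flag-050260 --memory=2048 --force-systemd \
	  --driver=docker --container-runtime=containerd
	minikube -p force-systemd-flag-050260 ssh "cat /etc/containerd/config.toml" | grep SystemdCgroup
	# expected: SystemdCgroup = true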

TestForceSystemdEnv (32.46s)

=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv
=== CONT  TestForceSystemdEnv
docker_test.go:155: (dbg) Run:  out/minikube-linux-arm64 start -p force-systemd-env-194349 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd
docker_test.go:155: (dbg) Done: out/minikube-linux-arm64 start -p force-systemd-env-194349 --memory=2048 --alsologtostderr -v=5 --driver=docker  --container-runtime=containerd: (30.143726178s)
docker_test.go:121: (dbg) Run:  out/minikube-linux-arm64 -p force-systemd-env-194349 ssh "cat /etc/containerd/config.toml"
helpers_test.go:175: Cleaning up "force-systemd-env-194349" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p force-systemd-env-194349
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p force-systemd-env-194349: (1.99912478s)
--- PASS: TestForceSystemdEnv (32.46s)

TestDockerEnvContainerd (47.02s)

=== RUN   TestDockerEnvContainerd
docker_test.go:170: running with containerd true linux arm64
docker_test.go:181: (dbg) Run:  out/minikube-linux-arm64 start -p dockerenv-551489 --driver=docker  --container-runtime=containerd
docker_test.go:181: (dbg) Done: out/minikube-linux-arm64 start -p dockerenv-551489 --driver=docker  --container-runtime=containerd: (31.395312465s)
docker_test.go:189: (dbg) Run:  /bin/bash -c "out/minikube-linux-arm64 docker-env --ssh-host --ssh-add -p dockerenv-551489"
docker_test.go:220: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-EDzGLWnRXRrk/agent.28517" SSH_AGENT_PID="28518" DOCKER_HOST=ssh://docker@127.0.0.1:32773 docker version"
docker_test.go:243: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-EDzGLWnRXRrk/agent.28517" SSH_AGENT_PID="28518" DOCKER_HOST=ssh://docker@127.0.0.1:32773 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env"
docker_test.go:243: (dbg) Done: /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-EDzGLWnRXRrk/agent.28517" SSH_AGENT_PID="28518" DOCKER_HOST=ssh://docker@127.0.0.1:32773 DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env": (1.169864661s)
docker_test.go:250: (dbg) Run:  /bin/bash -c "SSH_AUTH_SOCK="/tmp/ssh-EDzGLWnRXRrk/agent.28517" SSH_AGENT_PID="28518" DOCKER_HOST=ssh://docker@127.0.0.1:32773 docker image ls"
helpers_test.go:175: Cleaning up "dockerenv-551489" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p dockerenv-551489
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p dockerenv-551489: (1.999176613s)
--- PASS: TestDockerEnvContainerd (47.02s)
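
The docker-env round trip, reproducible without the harness (a sketch assuming a local docker client and a running ssh-agent; the log above sets the variables explicitly, while eval'ing the command's output is the usual equivalent):

	minikube start -p dockerenv-551489 --driver=docker --container-runtime=containerd
	# Point the local docker client at the node's daemon over SSH:
	eval "$(minikube docker-env --ssh-host --ssh-add -p dockerenv-551489)"
	docker version
	DOCKER_BUILDKIT=0 docker build -t local/minikube-dockerenv-containerd-test:latest testdata/docker-env
	docker image ls   # the freshly built image should appear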

TestErrorSpam/setup (28.63s)

=== RUN   TestErrorSpam/setup
error_spam_test.go:81: (dbg) Run:  out/minikube-linux-arm64 start -p nospam-376105 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-376105 --driver=docker  --container-runtime=containerd
error_spam_test.go:81: (dbg) Done: out/minikube-linux-arm64 start -p nospam-376105 -n=1 --memory=2250 --wait=false --log_dir=/tmp/nospam-376105 --driver=docker  --container-runtime=containerd: (28.626677254s)
--- PASS: TestErrorSpam/setup (28.63s)

TestErrorSpam/start (0.79s)

=== RUN   TestErrorSpam/start
error_spam_test.go:216: Cleaning up 1 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 start --dry-run
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 start --dry-run
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 start --dry-run
--- PASS: TestErrorSpam/start (0.79s)

TestErrorSpam/status (1.11s)

=== RUN   TestErrorSpam/status
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 status
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 status
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 status
--- PASS: TestErrorSpam/status (1.11s)

TestErrorSpam/pause (1.85s)

=== RUN   TestErrorSpam/pause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 pause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 pause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 pause
--- PASS: TestErrorSpam/pause (1.85s)

TestErrorSpam/unpause (1.79s)

=== RUN   TestErrorSpam/unpause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 unpause
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 unpause
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 unpause
--- PASS: TestErrorSpam/unpause (1.79s)

TestErrorSpam/stop (1.48s)

=== RUN   TestErrorSpam/stop
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 stop
error_spam_test.go:159: (dbg) Done: out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 stop: (1.263546137s)
error_spam_test.go:159: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 stop
error_spam_test.go:182: (dbg) Run:  out/minikube-linux-arm64 -p nospam-376105 --log_dir /tmp/nospam-376105 stop
--- PASS: TestErrorSpam/stop (1.48s)

TestFunctional/serial/CopySyncFile (0s)

=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1872: local sync path: /home/jenkins/minikube-integration/20535-2262/.minikube/files/etc/test/nested/copy/7572/hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.00s)

TestFunctional/serial/StartWithProxy (52.28s)

=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:2251: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd
functional_test.go:2251: (dbg) Done: out/minikube-linux-arm64 start -p functional-703463 --memory=4000 --apiserver-port=8441 --wait=all --driver=docker  --container-runtime=containerd: (52.282896222s)
--- PASS: TestFunctional/serial/StartWithProxy (52.28s)

TestFunctional/serial/AuditLog (0s)

=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

TestFunctional/serial/SoftStart (6.32s)

=== RUN   TestFunctional/serial/SoftStart
I0317 10:34:10.144952    7572 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
functional_test.go:676: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --alsologtostderr -v=8
functional_test.go:676: (dbg) Done: out/minikube-linux-arm64 start -p functional-703463 --alsologtostderr -v=8: (6.320674973s)
functional_test.go:680: soft start took 6.324690689s for "functional-703463" cluster.
I0317 10:34:16.465939    7572 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
--- PASS: TestFunctional/serial/SoftStart (6.32s)

TestFunctional/serial/KubeContext (0.07s)

=== RUN   TestFunctional/serial/KubeContext
functional_test.go:698: (dbg) Run:  kubectl config current-context
--- PASS: TestFunctional/serial/KubeContext (0.07s)

TestFunctional/serial/KubectlGetPods (0.13s)

=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:713: (dbg) Run:  kubectl --context functional-703463 get po -A
--- PASS: TestFunctional/serial/KubectlGetPods (0.13s)

TestFunctional/serial/CacheCmd/cache/add_remote (4.26s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:1066: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:3.1
E0317 10:34:17.245450    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.252510    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.263983    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.285437    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.326955    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.408394    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.569884    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:17.891574    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:1066: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:3.1: (1.65697605s)
functional_test.go:1066: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:3.3
E0317 10:34:18.533831    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:1066: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:3.3: (1.356684015s)
functional_test.go:1066: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:latest
E0317 10:34:19.815794    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:1066: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 cache add registry.k8s.io/pause:latest: (1.243749896s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (4.26s)

TestFunctional/serial/CacheCmd/cache/add_local (1.25s)

=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1094: (dbg) Run:  docker build -t minikube-local-cache-test:functional-703463 /tmp/TestFunctionalserialCacheCmdcacheadd_local211131832/001
functional_test.go:1106: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache add minikube-local-cache-test:functional-703463
functional_test.go:1111: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache delete minikube-local-cache-test:functional-703463
functional_test.go:1100: (dbg) Run:  docker rmi minikube-local-cache-test:functional-703463
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (1.25s)

TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)

=== RUN   TestFunctional/serial/CacheCmd/cache/CacheDelete
functional_test.go:1119: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/CacheDelete (0.05s)

TestFunctional/serial/CacheCmd/cache/list (0.05s)

=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1127: (dbg) Run:  out/minikube-linux-arm64 cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.05s)

TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.33s)

=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1141: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh sudo crictl images
E0317 10:34:22.377728    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (0.33s)

TestFunctional/serial/CacheCmd/cache/cache_reload (2.03s)

=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1164: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh sudo crictl rmi registry.k8s.io/pause:latest
functional_test.go:1170: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1170: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh sudo crictl inspecti registry.k8s.io/pause:latest: exit status 1 (283.122944ms)
-- stdout --
	FATA[0000] no such image "registry.k8s.io/pause:latest" present 
-- /stdout --
** stderr ** 
	ssh: Process exited with status 1
** /stderr **
functional_test.go:1175: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cache reload
functional_test.go:1175: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 cache reload: (1.130349644s)
functional_test.go:1180: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh sudo crictl inspecti registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (2.03s)
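
The reload round trip in shell form (a sketch; pause:latest is simply the image this test removes and restores):

	minikube -p functional-703463 ssh sudo crictl rmi registry.k8s.io/pause:latest
	minikube -p functional-703463 ssh sudo crictl inspecti registry.k8s.io/pause:latest   # exits 1: image gone
	minikube -p functional-703463 cache reload                                            # pushes cached images back to the node
	minikube -p functional-703463 ssh sudo crictl inspecti registry.k8s.io/pause:latest   # succeeds again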

TestFunctional/serial/CacheCmd/cache/delete (0.11s)

=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1189: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:3.1
functional_test.go:1189: (dbg) Run:  out/minikube-linux-arm64 cache delete registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.11s)

TestFunctional/serial/MinikubeKubectlCmd (0.14s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:733: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 kubectl -- --context functional-703463 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.14s)

TestFunctional/serial/MinikubeKubectlCmdDirectly (0.13s)

=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:758: (dbg) Run:  out/kubectl --context functional-703463 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (0.13s)

TestFunctional/serial/ExtraConfig (43.75s)

=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:774: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
E0317 10:34:27.499015    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:37.740321    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:34:58.221788    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
functional_test.go:774: (dbg) Done: out/minikube-linux-arm64 start -p functional-703463 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: (43.753258561s)
functional_test.go:778: restart took 43.753367674s for "functional-703463" cluster.
I0317 10:35:08.763310    7572 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
--- PASS: TestFunctional/serial/ExtraConfig (43.75s)
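
The restart-with-extra-config step, condensed (the kubectl check is an assumption added for illustration; the test itself only asserts that the restart converges with --wait=all):

	minikube start -p functional-703463 \
	  --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
	# Optional: confirm the flag reached the apiserver pod spec.
	kubectl --context functional-703463 -n kube-system get pods -l component=kube-apiserver -o yaml \
	  | grep enable-admission-plugins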

TestFunctional/serial/ComponentHealth (0.09s)

=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:827: (dbg) Run:  kubectl --context functional-703463 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:842: etcd phase: Running
functional_test.go:852: etcd status: Ready
functional_test.go:842: kube-apiserver phase: Running
functional_test.go:852: kube-apiserver status: Ready
functional_test.go:842: kube-controller-manager phase: Running
functional_test.go:852: kube-controller-manager status: Ready
functional_test.go:842: kube-scheduler phase: Running
functional_test.go:852: kube-scheduler status: Ready
--- PASS: TestFunctional/serial/ComponentHealth (0.09s)

TestFunctional/serial/LogsCmd (1.74s)

=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1253: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 logs
functional_test.go:1253: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 logs: (1.740337011s)
--- PASS: TestFunctional/serial/LogsCmd (1.74s)

TestFunctional/serial/LogsFileCmd (1.72s)

=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1267: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 logs --file /tmp/TestFunctionalserialLogsFileCmd1901408597/001/logs.txt
functional_test.go:1267: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 logs --file /tmp/TestFunctionalserialLogsFileCmd1901408597/001/logs.txt: (1.717106181s)
--- PASS: TestFunctional/serial/LogsFileCmd (1.72s)

TestFunctional/serial/InvalidService (4.24s)

=== RUN   TestFunctional/serial/InvalidService
functional_test.go:2338: (dbg) Run:  kubectl --context functional-703463 apply -f testdata/invalidsvc.yaml
functional_test.go:2352: (dbg) Run:  out/minikube-linux-arm64 service invalid-svc -p functional-703463
functional_test.go:2352: (dbg) Non-zero exit: out/minikube-linux-arm64 service invalid-svc -p functional-703463: exit status 115 (377.181614ms)
-- stdout --
	|-----------|-------------|-------------|---------------------------|
	| NAMESPACE |    NAME     | TARGET PORT |            URL            |
	|-----------|-------------|-------------|---------------------------|
	| default   | invalid-svc |          80 | http://192.168.49.2:31144 |
	|-----------|-------------|-------------|---------------------------|
	
	
-- /stdout --
** stderr ** 
	X Exiting due to SVC_UNREACHABLE: service not available: no running pod for service invalid-svc found
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_service_96b204199e3191fa1740d4430b018a3c8028d52d_0.log                 │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
** /stderr **
functional_test.go:2344: (dbg) Run:  kubectl --context functional-703463 delete -f testdata/invalidsvc.yaml
--- PASS: TestFunctional/serial/InvalidService (4.24s)
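
What this exercises: minikube service must fail cleanly when a Service has no running backing pod. A sketch (testdata/invalidsvc.yaml is the manifest the test applies):

	kubectl --context functional-703463 apply -f testdata/invalidsvc.yaml
	minikube service invalid-svc -p functional-703463
	echo $?   # expected: 115 (SVC_UNREACHABLE), as in the run above
	kubectl --context functional-703463 delete -f testdata/invalidsvc.yaml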

TestFunctional/parallel/ConfigCmd (0.46s)

=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config unset cpus
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config get cpus
functional_test.go:1216: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 config get cpus: exit status 14 (77.149051ms)
** stderr ** 
	Error: specified key could not be found in config
** /stderr **
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config set cpus 2
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config get cpus
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config unset cpus
functional_test.go:1216: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 config get cpus
functional_test.go:1216: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 config get cpus: exit status 14 (62.654698ms)
** stderr ** 
	Error: specified key could not be found in config
** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (0.46s)
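
The config get/set/unset contract in brief: get on an unset key exits 14 with "specified key could not be found in config", while set and unset succeed silently. A sketch:

	minikube -p functional-703463 config unset cpus
	minikube -p functional-703463 config get cpus    # exits 14: key not set
	minikube -p functional-703463 config set cpus 2
	minikube -p functional-703463 config get cpus    # prints 2
	minikube -p functional-703463 config unset cpus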

TestFunctional/parallel/DashboardCmd (10.4s)

=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd
=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:922: (dbg) daemon: [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-703463 --alsologtostderr -v=1]
functional_test.go:927: (dbg) stopping [out/minikube-linux-arm64 dashboard --url --port 36195 -p functional-703463 --alsologtostderr -v=1] ...
helpers_test.go:508: unable to kill pid 44688: os: process already finished
--- PASS: TestFunctional/parallel/DashboardCmd (10.40s)

TestFunctional/parallel/DryRun (0.49s)

=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun
=== CONT  TestFunctional/parallel/DryRun
functional_test.go:991: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:991: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-703463 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (210.835336ms)
-- stdout --
	* [functional-703463] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Using the docker driver based on existing profile
	
	
-- /stdout --
** stderr ** 
	I0317 10:35:54.215764   44376 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:35:54.215964   44376 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:35:54.215978   44376 out.go:358] Setting ErrFile to fd 2...
	I0317 10:35:54.215984   44376 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:35:54.216276   44376 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:35:54.216676   44376 out.go:352] Setting JSON to false
	I0317 10:35:54.217700   44376 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":1100,"bootTime":1742206655,"procs":206,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 10:35:54.217768   44376 start.go:139] virtualization:  
	I0317 10:35:54.221004   44376 out.go:177] * [functional-703463] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	I0317 10:35:54.224707   44376 out.go:177]   - MINIKUBE_LOCATION=20535
	I0317 10:35:54.224812   44376 notify.go:220] Checking for updates...
	I0317 10:35:54.230380   44376 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 10:35:54.233891   44376 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:35:54.236864   44376 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 10:35:54.239847   44376 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0317 10:35:54.242848   44376 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0317 10:35:54.246399   44376 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:35:54.247239   44376 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 10:35:54.287991   44376 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 10:35:54.288122   44376 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:35:54.352298   44376 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:36 OomKillDisable:true NGoroutines:53 SystemTime:2025-03-17 10:35:54.343118444 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:35:54.352412   44376 docker.go:318] overlay module found
	I0317 10:35:54.355653   44376 out.go:177] * Using the docker driver based on existing profile
	I0317 10:35:54.358407   44376 start.go:297] selected driver: docker
	I0317 10:35:54.358431   44376 start.go:901] validating driver "docker" against &{Name:functional-703463 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:functional-703463 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:35:54.358535   44376 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0317 10:35:54.363278   44376 out.go:201] 
	W0317 10:35:54.366208   44376 out.go:270] X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	X Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250MiB is less than the usable minimum of 1800MB
	I0317 10:35:54.369004   44376 out.go:201] 

** /stderr **
functional_test.go:1008: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --dry-run --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
--- PASS: TestFunctional/parallel/DryRun (0.49s)
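
Note: the RSRC_INSUFFICIENT_REQ_MEMORY exit captured above is reproducible by hand; a minimal sketch using the same profile and flags as the log (the exit code 23 is taken from the InternationalLanguage run below):

	$ out/minikube-linux-arm64 start -p functional-703463 --dry-run --memory 250MB \
	    --alsologtostderr --driver=docker --container-runtime=containerd
	$ echo $?
	23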

TestFunctional/parallel/InternationalLanguage (0.21s)

=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage

=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:1037: (dbg) Run:  out/minikube-linux-arm64 start -p functional-703463 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd
functional_test.go:1037: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p functional-703463 --dry-run --memory 250MB --alsologtostderr --driver=docker  --container-runtime=containerd: exit status 23 (206.431792ms)

-- stdout --
	* [functional-703463] minikube v1.35.0 sur Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	* Utilisation du pilote docker basé sur le profil existant
	
	

-- /stdout --
** stderr ** 
	I0317 10:35:54.011939   44331 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:35:54.012573   44331 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:35:54.012586   44331 out.go:358] Setting ErrFile to fd 2...
	I0317 10:35:54.012593   44331 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:35:54.013042   44331 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:35:54.013484   44331 out.go:352] Setting JSON to false
	I0317 10:35:54.014500   44331 start.go:129] hostinfo: {"hostname":"ip-172-31-30-239","uptime":1099,"bootTime":1742206655,"procs":206,"os":"linux","platform":"ubuntu","platformFamily":"debian","platformVersion":"20.04","kernelVersion":"5.15.0-1077-aws","kernelArch":"aarch64","virtualizationSystem":"","virtualizationRole":"","hostId":"92f46a7d-c249-4c12-924a-77f64874c910"}
	I0317 10:35:54.014584   44331 start.go:139] virtualization:  
	I0317 10:35:54.018167   44331 out.go:177] * [functional-703463] minikube v1.35.0 sur Ubuntu 20.04 (arm64)
	I0317 10:35:54.022064   44331 out.go:177]   - MINIKUBE_LOCATION=20535
	I0317 10:35:54.022166   44331 notify.go:220] Checking for updates...
	I0317 10:35:54.028088   44331 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0317 10:35:54.031034   44331 out.go:177]   - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	I0317 10:35:54.034201   44331 out.go:177]   - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	I0317 10:35:54.037172   44331 out.go:177]   - MINIKUBE_BIN=out/minikube-linux-arm64
	I0317 10:35:54.040196   44331 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0317 10:35:54.043648   44331 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:35:54.044330   44331 driver.go:394] Setting default libvirt URI to qemu:///system
	I0317 10:35:54.068504   44331 docker.go:123] docker version: linux-28.0.1:Docker Engine - Community
	I0317 10:35:54.068636   44331 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:35:54.141828   44331 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:1 ContainersRunning:1 ContainersPaused:0 ContainersStopped:0 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:36 OomKillDisable:true NGoroutines:53 SystemTime:2025-03-17 10:35:54.132522903 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:a
arch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> Se
rverErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:35:54.141936   44331 docker.go:318] overlay module found
	I0317 10:35:54.145112   44331 out.go:177] * Utilisation du pilote docker basé sur le profil existant
	I0317 10:35:54.148005   44331 start.go:297] selected driver: docker
	I0317 10:35:54.148031   44331 start.go:901] validating driver "docker" against &{Name:functional-703463 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1741860993-20523@sha256:cd976907fa4d517c84fff1e5ef773b9fb3c738c4e1ded824ea5133470a66e185 Memory:4000 CPUs:2 DiskSize:20000 Driver:docker HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:functional-703463 Namespace:default APIServerHAVIP: APIServerNa
me:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:containerd CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:192.168.49.2 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:containerd ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:/home/jenkins:/minikube-host Mount9PVersion:9p2000.L MountGID:do
cker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0317 10:35:54.148145   44331 start.go:912] status for docker: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0317 10:35:54.151790   44331 out.go:201] 
	W0317 10:35:54.154905   44331 out.go:270] X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	X Fermeture en raison de RSRC_INSUFFICIENT_REQ_MEMORY : L'allocation de mémoire demandée 250 Mio est inférieure au minimum utilisable de 1800 Mo
	I0317 10:35:54.157856   44331 out.go:201] 

** /stderr **
--- PASS: TestFunctional/parallel/InternationalLanguage (0.21s)
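
Note: the French output above is the point of this test; it asserts the failure message is localized. For reference, "Utilisation du pilote docker basé sur le profil existant" translates to "Using the docker driver based on existing profile", and the X line to "Exiting due to RSRC_INSUFFICIENT_REQ_MEMORY: Requested memory allocation 250 MiB is less than the usable minimum of 1800 MB". A sketch of forcing the locale by hand, assuming minikube picks up the standard LC_ALL/LANG environment variables (which appears to be how the harness selects French):

	$ LC_ALL=fr_FR.UTF-8 out/minikube-linux-arm64 start -p functional-703463 \
	    --dry-run --memory 250MB --driver=docker --container-runtime=containerd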

TestFunctional/parallel/StatusCmd (1.09s)

=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd

=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:871: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 status
functional_test.go:877: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:889: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 status -o json
--- PASS: TestFunctional/parallel/StatusCmd (1.09s)
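
Note: status -f takes a Go template over the status struct, as exercised above (the template keys Host, Kubelet, APIServer and Kubeconfig come from the logged command; the output value below is an assumption for a healthy cluster):

	$ out/minikube-linux-arm64 -p functional-703463 status -f 'host:{{.Host}}'
	host:Running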

TestFunctional/parallel/ServiceCmdConnect (10.65s)

=== RUN   TestFunctional/parallel/ServiceCmdConnect
=== PAUSE TestFunctional/parallel/ServiceCmdConnect

=== CONT  TestFunctional/parallel/ServiceCmdConnect
functional_test.go:1644: (dbg) Run:  kubectl --context functional-703463 create deployment hello-node-connect --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1652: (dbg) Run:  kubectl --context functional-703463 expose deployment hello-node-connect --type=NodePort --port=8080
functional_test.go:1657: (dbg) TestFunctional/parallel/ServiceCmdConnect: waiting 10m0s for pods matching "app=hello-node-connect" in namespace "default" ...
helpers_test.go:344: "hello-node-connect-8449669db6-djb9t" [d1e10c99-566e-40fa-ac19-f95b79e7bd1f] Pending / Ready:ContainersNotReady (containers with unready status: [echoserver-arm]) / ContainersReady:ContainersNotReady (containers with unready status: [echoserver-arm])
helpers_test.go:344: "hello-node-connect-8449669db6-djb9t" [d1e10c99-566e-40fa-ac19-f95b79e7bd1f] Running
functional_test.go:1657: (dbg) TestFunctional/parallel/ServiceCmdConnect: app=hello-node-connect healthy within 10.003778218s
functional_test.go:1666: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service hello-node-connect --url
functional_test.go:1672: found endpoint for hello-node-connect: http://192.168.49.2:31878
functional_test.go:1692: http://192.168.49.2:31878: success! body:

Hostname: hello-node-connect-8449669db6-djb9t

Pod Information:
	-no pod information available-

Server values:
	server_version=nginx: 1.13.3 - lua: 10008

Request Information:
	client_address=10.244.0.1
	method=GET
	real path=/
	query=
	request_version=1.1
	request_uri=http://192.168.49.2:8080/

Request Headers:
	accept-encoding=gzip
	host=192.168.49.2:31878
	user-agent=Go-http-client/1.1

Request Body:
	-no body in request-

--- PASS: TestFunctional/parallel/ServiceCmdConnect (10.65s)
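
Note: the connect flow from the log, condensed (NodePort 31878 is whatever the cluster assigned for this run; the curl step is an assumed manual equivalent of the test's HTTP check):

	$ kubectl --context functional-703463 create deployment hello-node-connect \
	    --image=registry.k8s.io/echoserver-arm:1.8
	$ kubectl --context functional-703463 expose deployment hello-node-connect \
	    --type=NodePort --port=8080
	$ out/minikube-linux-arm64 -p functional-703463 service hello-node-connect --url
	http://192.168.49.2:31878
	$ curl http://192.168.49.2:31878    # echoserver replies with hostname and request info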

TestFunctional/parallel/AddonsCmd (0.15s)

=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd

=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1707: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 addons list
functional_test.go:1719: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.15s)

TestFunctional/parallel/PersistentVolumeClaim (25.47s)

=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim

=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:344: "storage-provisioner" [6ab57670-d663-4525-bb3c-b0cf35d08509] Running
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: integration-test=storage-provisioner healthy within 5.003841971s
functional_test_pvc_test.go:49: (dbg) Run:  kubectl --context functional-703463 get storageclass -o=json
functional_test_pvc_test.go:69: (dbg) Run:  kubectl --context functional-703463 apply -f testdata/storage-provisioner/pvc.yaml
functional_test_pvc_test.go:76: (dbg) Run:  kubectl --context functional-703463 get pvc myclaim -o=json
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-703463 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:344: "sp-pod" [4f83b9c2-caf6-4f03-92b0-3b2e5ab79283] Pending
helpers_test.go:344: "sp-pod" [4f83b9c2-caf6-4f03-92b0-3b2e5ab79283] Pending / Ready:ContainersNotReady (containers with unready status: [myfrontend]) / ContainersReady:ContainersNotReady (containers with unready status: [myfrontend])
helpers_test.go:344: "sp-pod" [4f83b9c2-caf6-4f03-92b0-3b2e5ab79283] Running
E0317 10:35:39.183195    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 11.003819971s
functional_test_pvc_test.go:100: (dbg) Run:  kubectl --context functional-703463 exec sp-pod -- touch /tmp/mount/foo
functional_test_pvc_test.go:106: (dbg) Run:  kubectl --context functional-703463 delete -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:106: (dbg) Done: kubectl --context functional-703463 delete -f testdata/storage-provisioner/pod.yaml: (1.247485303s)
functional_test_pvc_test.go:125: (dbg) Run:  kubectl --context functional-703463 apply -f testdata/storage-provisioner/pod.yaml
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 3m0s for pods matching "test=storage-provisioner" in namespace "default" ...
helpers_test.go:344: "sp-pod" [0e7bd386-f745-4a3c-a5e9-99fae7ec0039] Pending
helpers_test.go:344: "sp-pod" [0e7bd386-f745-4a3c-a5e9-99fae7ec0039] Pending / Ready:ContainersNotReady (containers with unready status: [myfrontend]) / ContainersReady:ContainersNotReady (containers with unready status: [myfrontend])
helpers_test.go:344: "sp-pod" [0e7bd386-f745-4a3c-a5e9-99fae7ec0039] Running
functional_test_pvc_test.go:130: (dbg) TestFunctional/parallel/PersistentVolumeClaim: test=storage-provisioner healthy within 7.004306628s
functional_test_pvc_test.go:114: (dbg) Run:  kubectl --context functional-703463 exec sp-pod -- ls /tmp/mount
--- PASS: TestFunctional/parallel/PersistentVolumeClaim (25.47s)
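
Note: the persistence check above, condensed: write a file through the claim, delete and recreate the pod, and confirm the file survives (the manifests are the repo's testdata and are not reproduced here):

	$ kubectl --context functional-703463 apply -f testdata/storage-provisioner/pvc.yaml
	$ kubectl --context functional-703463 apply -f testdata/storage-provisioner/pod.yaml
	$ kubectl --context functional-703463 exec sp-pod -- touch /tmp/mount/foo
	$ kubectl --context functional-703463 delete -f testdata/storage-provisioner/pod.yaml
	$ kubectl --context functional-703463 apply -f testdata/storage-provisioner/pod.yaml
	$ kubectl --context functional-703463 exec sp-pod -- ls /tmp/mount    # foo should still be listed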

TestFunctional/parallel/SSHCmd (0.72s)

=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd

=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1742: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "echo hello"
functional_test.go:1759: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "cat /etc/hostname"
--- PASS: TestFunctional/parallel/SSHCmd (0.72s)

TestFunctional/parallel/CpCmd (2.16s)

=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd

=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cp testdata/cp-test.txt /home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh -n functional-703463 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cp functional-703463:/home/docker/cp-test.txt /tmp/TestFunctionalparallelCpCmd2641959974/001/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh -n functional-703463 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 cp testdata/cp-test.txt /tmp/does/not/exist/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh -n functional-703463 "sudo cat /tmp/does/not/exist/cp-test.txt"
--- PASS: TestFunctional/parallel/CpCmd (2.16s)
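
Note: cp is exercised in both directions above; a condensed sketch (the local destination /tmp/cp-test.txt is an arbitrary example path, not the temp dir the harness used):

	$ out/minikube-linux-arm64 -p functional-703463 cp testdata/cp-test.txt /home/docker/cp-test.txt
	$ out/minikube-linux-arm64 -p functional-703463 cp functional-703463:/home/docker/cp-test.txt /tmp/cp-test.txt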

TestFunctional/parallel/FileSync (0.32s)

=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync

=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1946: Checking for existence of /etc/test/nested/copy/7572/hosts within VM
functional_test.go:1948: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /etc/test/nested/copy/7572/hosts"
functional_test.go:1953: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (0.32s)

TestFunctional/parallel/CertSync (2.03s)

=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync

=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1989: Checking for existence of /etc/ssl/certs/7572.pem within VM
functional_test.go:1990: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /etc/ssl/certs/7572.pem"
functional_test.go:1989: Checking for existence of /usr/share/ca-certificates/7572.pem within VM
functional_test.go:1990: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /usr/share/ca-certificates/7572.pem"
functional_test.go:1989: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1990: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:2016: Checking for existence of /etc/ssl/certs/75722.pem within VM
functional_test.go:2017: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /etc/ssl/certs/75722.pem"
functional_test.go:2016: Checking for existence of /usr/share/ca-certificates/75722.pem within VM
functional_test.go:2017: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /usr/share/ca-certificates/75722.pem"
functional_test.go:2016: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:2017: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
--- PASS: TestFunctional/parallel/CertSync (2.03s)
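
Note: the hashed filenames checked above (51391683.0, 3ec20f2e.0) follow OpenSSL's subject-hash naming for /etc/ssl/certs, so a synced cert should be findable by recomputing the hash; a sketch, assuming the cert is available locally as 7572.pem:

	$ openssl x509 -noout -subject_hash -in 7572.pem    # prints the 8-hex-digit stem of the .0 filename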

TestFunctional/parallel/NodeLabels (0.08s)

=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels

=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:236: (dbg) Run:  kubectl --context functional-703463 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
--- PASS: TestFunctional/parallel/NodeLabels (0.08s)

TestFunctional/parallel/NonActiveRuntimeDisabled (0.78s)

=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled

=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:2044: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo systemctl is-active docker"
functional_test.go:2044: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "sudo systemctl is-active docker": exit status 1 (371.557771ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
functional_test.go:2044: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo systemctl is-active crio"
functional_test.go:2044: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "sudo systemctl is-active crio": exit status 1 (408.866719ms)

-- stdout --
	inactive

-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (0.78s)
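
Note: with --container-runtime=containerd, docker and crio are expected inactive, as verified above; the complementary check (a sketch, assuming containerd runs as a systemd unit inside the node, as it does in the kicbase image) would be:

	$ out/minikube-linux-arm64 -p functional-703463 ssh "sudo systemctl is-active containerd"
	active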

TestFunctional/parallel/License (0.25s)

=== RUN   TestFunctional/parallel/License
=== PAUSE TestFunctional/parallel/License

=== CONT  TestFunctional/parallel/License
functional_test.go:2305: (dbg) Run:  out/minikube-linux-arm64 license
--- PASS: TestFunctional/parallel/License (0.25s)

TestFunctional/parallel/Version/short (0.06s)

=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short

=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2273: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 version --short
--- PASS: TestFunctional/parallel/Version/short (0.06s)

TestFunctional/parallel/Version/components (1.29s)

=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components

=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2287: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 version -o=json --components
2025/03/17 10:36:04 [DEBUG] GET http://127.0.0.1:36195/api/v1/namespaces/kubernetes-dashboard/services/http:kubernetes-dashboard:/proxy/
functional_test.go:2287: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 version -o=json --components: (1.292528086s)
--- PASS: TestFunctional/parallel/Version/components (1.29s)

TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.54s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr]
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr]
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr] ...
helpers_test.go:508: unable to kill pid 40881: os: process already finished
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (0.54s)
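
Note: the test starts two concurrent tunnel daemons and then tears both down; a rough manual equivalent (the job-control syntax is an assumption, not how the harness actually manages the processes):

	$ out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr &
	$ out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr &
	$ kill %1 %2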

TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListShort
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListShort

=== CONT  TestFunctional/parallel/ImageCommands/ImageListShort
functional_test.go:278: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls --format short --alsologtostderr
functional_test.go:283: (dbg) Stdout: out/minikube-linux-arm64 -p functional-703463 image ls --format short --alsologtostderr:
registry.k8s.io/pause:latest
registry.k8s.io/pause:3.3
registry.k8s.io/pause:3.10
registry.k8s.io/pause:3.1
registry.k8s.io/kube-scheduler:v1.32.2
registry.k8s.io/kube-proxy:v1.32.2
registry.k8s.io/kube-controller-manager:v1.32.2
registry.k8s.io/kube-apiserver:v1.32.2
registry.k8s.io/etcd:3.5.16-0
registry.k8s.io/echoserver-arm:1.8
registry.k8s.io/coredns/coredns:v1.11.3
gcr.io/k8s-minikube/storage-provisioner:v5
gcr.io/k8s-minikube/busybox:1.28.4-glibc
docker.io/library/nginx:latest
docker.io/library/nginx:alpine
docker.io/library/minikube-local-cache-test:functional-703463
docker.io/kindest/kindnetd:v20250214-acbabc1a
docker.io/kindest/kindnetd:v20241212-9f82dd49
docker.io/kicbase/echo-server:functional-703463
functional_test.go:286: (dbg) Stderr: out/minikube-linux-arm64 -p functional-703463 image ls --format short --alsologtostderr:
I0317 10:36:05.104003   45997 out.go:345] Setting OutFile to fd 1 ...
I0317 10:36:05.104218   45997 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.104231   45997 out.go:358] Setting ErrFile to fd 2...
I0317 10:36:05.104237   45997 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.104524   45997 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
I0317 10:36:05.105195   45997 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.105325   45997 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.105772   45997 cli_runner.go:164] Run: docker container inspect functional-703463 --format={{.State.Status}}
I0317 10:36:05.131360   45997 ssh_runner.go:195] Run: systemctl --version
I0317 10:36:05.131418   45997 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-703463
I0317 10:36:05.152657   45997 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32783 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/functional-703463/id_rsa Username:docker}
I0317 10:36:05.257471   45997 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListShort (0.27s)
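
Note: per the stderr above, image ls is answered by crictl inside the node, so the same inventory can be read directly over ssh (sketch):

	$ out/minikube-linux-arm64 -p functional-703463 ssh "sudo crictl images --output json"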

TestFunctional/parallel/ImageCommands/ImageListTable (0.27s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListTable
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListTable

=== CONT  TestFunctional/parallel/ImageCommands/ImageListTable
functional_test.go:278: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls --format table --alsologtostderr
functional_test.go:283: (dbg) Stdout: out/minikube-linux-arm64 -p functional-703463 image ls --format table --alsologtostderr:
|---------------------------------------------|--------------------|---------------|--------|
|                    Image                    |        Tag         |   Image ID    |  Size  |
|---------------------------------------------|--------------------|---------------|--------|
| registry.k8s.io/kube-controller-manager     | v1.32.2            | sha256:3c9285 | 24MB   |
| registry.k8s.io/kube-scheduler              | v1.32.2            | sha256:82dfa0 | 18.9MB |
| registry.k8s.io/pause                       | latest             | sha256:8cb209 | 71.3kB |
| docker.io/library/nginx                     | alpine             | sha256:cedb66 | 21.7MB |
| gcr.io/k8s-minikube/storage-provisioner     | v5                 | sha256:ba04bb | 8.03MB |
| registry.k8s.io/coredns/coredns             | v1.11.3            | sha256:2f6c96 | 16.9MB |
| registry.k8s.io/pause                       | 3.3                | sha256:3d1873 | 249kB  |
| docker.io/kicbase/echo-server               | functional-703463  | sha256:ce2d2c | 2.17MB |
| docker.io/library/nginx                     | latest             | sha256:678546 | 68.6MB |
| registry.k8s.io/etcd                        | 3.5.16-0           | sha256:7fc9d4 | 67.9MB |
| registry.k8s.io/kube-proxy                  | v1.32.2            | sha256:e5aac5 | 27.4MB |
| registry.k8s.io/pause                       | 3.1                | sha256:8057e0 | 262kB  |
| docker.io/kindest/kindnetd                  | v20241212-9f82dd49 | sha256:e1181e | 35.7MB |
| gcr.io/k8s-minikube/busybox                 | 1.28.4-glibc       | sha256:1611cd | 1.94MB |
| registry.k8s.io/echoserver-arm              | 1.8                | sha256:72565b | 45.3MB |
| registry.k8s.io/pause                       | 3.10               | sha256:afb617 | 268kB  |
| docker.io/kindest/kindnetd                  | v20250214-acbabc1a | sha256:ee75e2 | 35.7MB |
| docker.io/library/minikube-local-cache-test | functional-703463  | sha256:d9538b | 993B   |
| registry.k8s.io/kube-apiserver              | v1.32.2            | sha256:6417e1 | 26.2MB |
|---------------------------------------------|--------------------|---------------|--------|
functional_test.go:286: (dbg) Stderr: out/minikube-linux-arm64 -p functional-703463 image ls --format table --alsologtostderr:
I0317 10:36:06.120971   46232 out.go:345] Setting OutFile to fd 1 ...
I0317 10:36:06.121264   46232 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:06.121278   46232 out.go:358] Setting ErrFile to fd 2...
I0317 10:36:06.121283   46232 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:06.121529   46232 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
I0317 10:36:06.122150   46232 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:06.122258   46232 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:06.122857   46232 cli_runner.go:164] Run: docker container inspect functional-703463 --format={{.State.Status}}
I0317 10:36:06.149328   46232 ssh_runner.go:195] Run: systemctl --version
I0317 10:36:06.149391   46232 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-703463
I0317 10:36:06.168603   46232 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32783 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/functional-703463/id_rsa Username:docker}
I0317 10:36:06.259175   46232 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListTable (0.27s)

TestFunctional/parallel/ImageCommands/ImageListJson (0.28s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListJson
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListJson

=== CONT  TestFunctional/parallel/ImageCommands/ImageListJson
functional_test.go:278: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls --format json --alsologtostderr
functional_test.go:283: (dbg) Stdout: out/minikube-linux-arm64 -p functional-703463 image ls --format json --alsologtostderr:
[{"id":"sha256:1611cd07b61d57dbbfebe6db242513fd51e1c02d20ba08af17a45837d86a8a8c","repoDigests":["gcr.io/k8s-minikube/busybox@sha256:2d03e6ceeb99250061dd110530b0ece7998cd84121f952adef120ea7c5a6f00e"],"repoTags":["gcr.io/k8s-minikube/busybox:1.28.4-glibc"],"size":"1935750"},{"id":"sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6","repoDigests":["gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944"],"repoTags":["gcr.io/k8s-minikube/storage-provisioner:v5"],"size":"8034419"},{"id":"sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4","repoDigests":["registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e"],"repoTags":["registry.k8s.io/coredns/coredns:v1.11.3"],"size":"16948420"},{"id":"sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.1"],"size":"262191"},{"id":"sha256:8cb2091f603e
75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a","repoDigests":[],"repoTags":["registry.k8s.io/pause:latest"],"size":"71300"},{"id":"sha256:ee75e27fff91c8d59835f9a3efdf968ff404e580bad69746a65bcf3e304ab26f","repoDigests":["docker.io/kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495"],"repoTags":["docker.io/kindest/kindnetd:v20250214-acbabc1a"],"size":"35677907"},{"id":"sha256:e5aac5df76d9b8dc899ab8c4db25a7648e7fb25cafe7a155066247883c78f062","repoDigests":["registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d"],"repoTags":["registry.k8s.io/kube-proxy:v1.32.2"],"size":"27362401"},{"id":"sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8","repoDigests":["registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a"],"repoTags":["registry.k8s.io/pause:3.10"],"size":"267933"},{"id":"sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8","repoDigests":["doc
ker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93"],"repoTags":[],"size":"74084559"},{"id":"sha256:e1181ee320546c66f17956a302db1b7899d88a593f116726718851133de588b6","repoDigests":["docker.io/kindest/kindnetd@sha256:56ea59f77258052c4506076525318ffa66817500f68e94a50fdf7d600a280d26"],"repoTags":["docker.io/kindest/kindnetd:v20241212-9f82dd49"],"size":"35679862"},{"id":"sha256:7fc9d4aa817aa6a3e549f3cd49d1f7b496407be979fc36dd5f356d59ce8c3a82","repoDigests":["registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5"],"repoTags":["registry.k8s.io/etcd:3.5.16-0"],"size":"67941650"},{"id":"sha256:3c9285acfd2ff7915bb451cc40ac060366ac519f3fef00c455f5aca0e0346c4d","repoDigests":["registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90"],"repoTags":["registry.k8s.io/kube-controller-manager:v1.32.2"],"size":"23968941"},{"id":"sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a
0fe0f77206a7300","repoDigests":[],"repoTags":["registry.k8s.io/pause:3.3"],"size":"249461"},{"id":"sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17","repoDigests":[],"repoTags":["docker.io/kicbase/echo-server:functional-703463"],"size":"2173567"},{"id":"sha256:d9538bca678118c8ffe73dc6a7f9541897e9ba6797bb5c51116791e5d0882852","repoDigests":[],"repoTags":["docker.io/library/minikube-local-cache-test:functional-703463"],"size":"993"},{"id":"sha256:cedb667e1a7b4e6d843a4f74f1f2db0dac1c29b43978aa72dbae2193e3b8eea3","repoDigests":["docker.io/library/nginx@sha256:4ff102c5d78d254a6f0da062b3cf39eaf07f01eec0927fd21e219d0af8bc0591"],"repoTags":["docker.io/library/nginx:alpine"],"size":"21684747"},{"id":"sha256:72565bf5bbedfb62e9d21afa2b1221b2c7a5e05b746dae33430bc550d3f87beb","repoDigests":["registry.k8s.io/echoserver-arm@sha256:b33d4cdf6ed097f4e9b77b135d83a596ab73c6268b0342648818eb85f5edfdb5"],"repoTags":["registry.k8s.io/echoserver-arm:1.8"],"size":"45324675"},{"id":"sha256:6417e1437b6d9a789e1ca78
9695a574e1df00a632bdbfbcae9695c9a7d500e32","repoDigests":["registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f"],"repoTags":["registry.k8s.io/kube-apiserver:v1.32.2"],"size":"26215036"},{"id":"sha256:82dfa03f692fb5d84f66c17d6ee9126b081182152b25d28ea456d89b7d5d8911","repoDigests":["registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76"],"repoTags":["registry.k8s.io/kube-scheduler:v1.32.2"],"size":"18921614"},{"id":"sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a","repoDigests":["docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c"],"repoTags":[],"size":"18306114"},{"id":"sha256:678546cdd20cd5baaea6f534dbb7482fc9f2f8d24c1f3c53c0e747b699b849da","repoDigests":["docker.io/library/nginx@sha256:9d6b58feebd2dbd3c56ab5853333d627cc6e281011cfd6050fa4bcf2072c9496"],"repoTags":["docker.io/library/nginx:latest"],"size":"68638658"}]
functional_test.go:286: (dbg) Stderr: out/minikube-linux-arm64 -p functional-703463 image ls --format json --alsologtostderr:
I0317 10:36:05.830125   46175 out.go:345] Setting OutFile to fd 1 ...
I0317 10:36:05.830265   46175 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.830278   46175 out.go:358] Setting ErrFile to fd 2...
I0317 10:36:05.830284   46175 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.830550   46175 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
I0317 10:36:05.831276   46175 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.831437   46175 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.831936   46175 cli_runner.go:164] Run: docker container inspect functional-703463 --format={{.State.Status}}
I0317 10:36:05.858608   46175 ssh_runner.go:195] Run: systemctl --version
I0317 10:36:05.858657   46175 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-703463
I0317 10:36:05.880307   46175 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32783 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/functional-703463/id_rsa Username:docker}
I0317 10:36:05.980170   46175 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListJson (0.28s)

TestFunctional/parallel/ImageCommands/ImageListYaml (0.23s)

=== RUN   TestFunctional/parallel/ImageCommands/ImageListYaml
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListYaml

=== CONT  TestFunctional/parallel/ImageCommands/ImageListYaml
functional_test.go:278: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls --format yaml --alsologtostderr
functional_test.go:283: (dbg) Stdout: out/minikube-linux-arm64 -p functional-703463 image ls --format yaml --alsologtostderr:
- id: sha256:20b332c9a70d8516d849d1ac23eff5800cbb2f263d379f0ec11ee908db6b25a8
repoDigests:
- docker.io/kubernetesui/dashboard@sha256:2e500d29e9d5f4a086b908eb8dfe7ecac57d2ab09d65b24f588b1d449841ef93
repoTags: []
size: "74084559"
- id: sha256:ce2d2cda2d858fdaea84129deb86d18e5dbf1c548f230b79fdca74cc91729d17
repoDigests: []
repoTags:
- docker.io/kicbase/echo-server:functional-703463
size: "2173567"
- id: sha256:678546cdd20cd5baaea6f534dbb7482fc9f2f8d24c1f3c53c0e747b699b849da
repoDigests:
- docker.io/library/nginx@sha256:9d6b58feebd2dbd3c56ab5853333d627cc6e281011cfd6050fa4bcf2072c9496
repoTags:
- docker.io/library/nginx:latest
size: "68638658"
- id: sha256:1611cd07b61d57dbbfebe6db242513fd51e1c02d20ba08af17a45837d86a8a8c
repoDigests:
- gcr.io/k8s-minikube/busybox@sha256:2d03e6ceeb99250061dd110530b0ece7998cd84121f952adef120ea7c5a6f00e
repoTags:
- gcr.io/k8s-minikube/busybox:1.28.4-glibc
size: "1935750"
- id: sha256:ba04bb24b95753201135cbc420b233c1b0b9fa2e1fd21d28319c348c33fbcde6
repoDigests:
- gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944
repoTags:
- gcr.io/k8s-minikube/storage-provisioner:v5
size: "8034419"
- id: sha256:72565bf5bbedfb62e9d21afa2b1221b2c7a5e05b746dae33430bc550d3f87beb
repoDigests:
- registry.k8s.io/echoserver-arm@sha256:b33d4cdf6ed097f4e9b77b135d83a596ab73c6268b0342648818eb85f5edfdb5
repoTags:
- registry.k8s.io/echoserver-arm:1.8
size: "45324675"
- id: sha256:e5aac5df76d9b8dc899ab8c4db25a7648e7fb25cafe7a155066247883c78f062
repoDigests:
- registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d
repoTags:
- registry.k8s.io/kube-proxy:v1.32.2
size: "27362401"
- id: sha256:e1181ee320546c66f17956a302db1b7899d88a593f116726718851133de588b6
repoDigests:
- docker.io/kindest/kindnetd@sha256:56ea59f77258052c4506076525318ffa66817500f68e94a50fdf7d600a280d26
repoTags:
- docker.io/kindest/kindnetd:v20241212-9f82dd49
size: "35679862"
- id: sha256:ee75e27fff91c8d59835f9a3efdf968ff404e580bad69746a65bcf3e304ab26f
repoDigests:
- docker.io/kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495
repoTags:
- docker.io/kindest/kindnetd:v20250214-acbabc1a
size: "35677907"
- id: sha256:8057e0500773a37cde2cff041eb13ebd68c748419a2fbfd1dfb5bf38696cc8e5
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.1
size: "262191"
- id: sha256:8cb2091f603e75187e2f6226c5901d12e00b1d1f778c6471ae4578e8a1c4724a
repoDigests: []
repoTags:
- registry.k8s.io/pause:latest
size: "71300"
- id: sha256:a422e0e982356f6c1cf0e5bb7b733363caae3992a07c99951fbcc73e58ed656a
repoDigests:
- docker.io/kubernetesui/metrics-scraper@sha256:76049887f07a0476dc93efc2d3569b9529bf982b22d29f356092ce206e98765c
repoTags: []
size: "18306114"
- id: sha256:2f6c962e7b8311337352d9fdea917da2184d9919f4da7695bc2a6517cf392fe4
repoDigests:
- registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e
repoTags:
- registry.k8s.io/coredns/coredns:v1.11.3
size: "16948420"
- id: sha256:7fc9d4aa817aa6a3e549f3cd49d1f7b496407be979fc36dd5f356d59ce8c3a82
repoDigests:
- registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5
repoTags:
- registry.k8s.io/etcd:3.5.16-0
size: "67941650"
- id: sha256:6417e1437b6d9a789e1ca789695a574e1df00a632bdbfbcae9695c9a7d500e32
repoDigests:
- registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f
repoTags:
- registry.k8s.io/kube-apiserver:v1.32.2
size: "26215036"
- id: sha256:3d18732f8686cc3c878055d99a05fa80289502fa496b36b6a0fe0f77206a7300
repoDigests: []
repoTags:
- registry.k8s.io/pause:3.3
size: "249461"
- id: sha256:3c9285acfd2ff7915bb451cc40ac060366ac519f3fef00c455f5aca0e0346c4d
repoDigests:
- registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90
repoTags:
- registry.k8s.io/kube-controller-manager:v1.32.2
size: "23968941"
- id: sha256:82dfa03f692fb5d84f66c17d6ee9126b081182152b25d28ea456d89b7d5d8911
repoDigests:
- registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76
repoTags:
- registry.k8s.io/kube-scheduler:v1.32.2
size: "18921614"
- id: sha256:d9538bca678118c8ffe73dc6a7f9541897e9ba6797bb5c51116791e5d0882852
repoDigests: []
repoTags:
- docker.io/library/minikube-local-cache-test:functional-703463
size: "993"
- id: sha256:cedb667e1a7b4e6d843a4f74f1f2db0dac1c29b43978aa72dbae2193e3b8eea3
repoDigests:
- docker.io/library/nginx@sha256:4ff102c5d78d254a6f0da062b3cf39eaf07f01eec0927fd21e219d0af8bc0591
repoTags:
- docker.io/library/nginx:alpine
size: "21684747"
- id: sha256:afb61768ce381961ca0beff95337601f29dc70ff3ed14e5e4b3e5699057e6aa8
repoDigests:
- registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a
repoTags:
- registry.k8s.io/pause:3.10
size: "267933"

functional_test.go:286: (dbg) Stderr: out/minikube-linux-arm64 -p functional-703463 image ls --format yaml --alsologtostderr:
I0317 10:36:05.372484   46075 out.go:345] Setting OutFile to fd 1 ...
I0317 10:36:05.372692   46075 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.372723   46075 out.go:358] Setting ErrFile to fd 2...
I0317 10:36:05.372742   46075 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.373031   46075 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
I0317 10:36:05.373684   46075 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.373857   46075 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.374342   46075 cli_runner.go:164] Run: docker container inspect functional-703463 --format={{.State.Status}}
I0317 10:36:05.393845   46075 ssh_runner.go:195] Run: systemctl --version
I0317 10:36:05.393899   46075 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-703463
I0317 10:36:05.415127   46075 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32783 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/functional-703463/id_rsa Username:docker}
I0317 10:36:05.503386   46075 ssh_runner.go:195] Run: sudo crictl images --output json
--- PASS: TestFunctional/parallel/ImageCommands/ImageListYaml (0.23s)

TestFunctional/parallel/ImageCommands/ImageBuild (3.63s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageBuild
=== PAUSE TestFunctional/parallel/ImageCommands/ImageBuild

=== CONT  TestFunctional/parallel/ImageCommands/ImageBuild
functional_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh pgrep buildkitd
functional_test.go:325: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh pgrep buildkitd: exit status 1 (296.72568ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
functional_test.go:332: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image build -t localhost/my-image:functional-703463 testdata/build --alsologtostderr
functional_test.go:332: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 image build -t localhost/my-image:functional-703463 testdata/build --alsologtostderr: (3.09663698s)
functional_test.go:340: (dbg) Stderr: out/minikube-linux-arm64 -p functional-703463 image build -t localhost/my-image:functional-703463 testdata/build --alsologtostderr:
I0317 10:36:05.918375   46189 out.go:345] Setting OutFile to fd 1 ...
I0317 10:36:05.919100   46189 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.919172   46189 out.go:358] Setting ErrFile to fd 2...
I0317 10:36:05.919289   46189 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0317 10:36:05.919593   46189 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
I0317 10:36:05.920308   46189 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.925201   46189 config.go:182] Loaded profile config "functional-703463": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
I0317 10:36:05.925735   46189 cli_runner.go:164] Run: docker container inspect functional-703463 --format={{.State.Status}}
I0317 10:36:05.943631   46189 ssh_runner.go:195] Run: systemctl --version
I0317 10:36:05.943688   46189 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" functional-703463
I0317 10:36:05.960900   46189 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32783 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/functional-703463/id_rsa Username:docker}
I0317 10:36:06.061586   46189 build_images.go:161] Building image from path: /tmp/build.2724657291.tar
I0317 10:36:06.061677   46189 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build
I0317 10:36:06.076134   46189 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/build/build.2724657291.tar
I0317 10:36:06.080295   46189 ssh_runner.go:352] existence check for /var/lib/minikube/build/build.2724657291.tar: stat -c "%s %y" /var/lib/minikube/build/build.2724657291.tar: Process exited with status 1
stdout:

stderr:
stat: cannot statx '/var/lib/minikube/build/build.2724657291.tar': No such file or directory
I0317 10:36:06.080329   46189 ssh_runner.go:362] scp /tmp/build.2724657291.tar --> /var/lib/minikube/build/build.2724657291.tar (3072 bytes)
I0317 10:36:06.107716   46189 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build/build.2724657291
I0317 10:36:06.119801   46189 ssh_runner.go:195] Run: sudo tar -C /var/lib/minikube/build/build.2724657291 -xf /var/lib/minikube/build/build.2724657291.tar
I0317 10:36:06.130290   46189 containerd.go:394] Building image: /var/lib/minikube/build/build.2724657291
I0317 10:36:06.130362   46189 ssh_runner.go:195] Run: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.2724657291 --local dockerfile=/var/lib/minikube/build/build.2724657291 --output type=image,name=localhost/my-image:functional-703463
#1 [internal] load build definition from Dockerfile
#1 transferring dockerfile: 97B done
#1 DONE 0.0s

#2 [internal] load metadata for gcr.io/k8s-minikube/busybox:latest
#2 DONE 1.1s

#3 [internal] load .dockerignore
#3 transferring context: 2B done
#3 DONE 0.0s

#4 [internal] load build context
#4 transferring context: 62B done
#4 DONE 0.0s

#5 [1/3] FROM gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b
#5 resolve gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b 0.0s done
#5 DONE 0.1s

#5 [1/3] FROM gcr.io/k8s-minikube/busybox:latest@sha256:ca5ae90100d50772da31f3b5016209e25ad61972404e2ccd83d44f10dee7e79b
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 0B / 828.50kB 0.2s
#5 sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 828.50kB / 828.50kB 0.3s done
#5 extracting sha256:a01966dde7f8d5ba10b6d87e776c7c8fb5a5f6bfa678874bd28b33b1fc6dba34 0.1s done
#5 DONE 0.4s

#6 [2/3] RUN true
#6 DONE 0.7s

#7 [3/3] ADD content.txt /
#7 DONE 0.0s

#8 exporting to image
#8 exporting layers 0.1s done
#8 exporting manifest sha256:43ce6f696924f796a310f875e48441a329a81deeba646de40dfe25bef08138d9 0.0s done
#8 exporting config sha256:a421e90c8a0bb5dac5f8f3593a127e14f1303dcd2bf3d81f8d3c3ef0c23150de 0.0s done
#8 naming to localhost/my-image:functional-703463 done
#8 DONE 0.2s
I0317 10:36:08.919504   46189 ssh_runner.go:235] Completed: sudo buildctl build --frontend dockerfile.v0 --local context=/var/lib/minikube/build/build.2724657291 --local dockerfile=/var/lib/minikube/build/build.2724657291 --output type=image,name=localhost/my-image:functional-703463: (2.789116832s)
I0317 10:36:08.919577   46189 ssh_runner.go:195] Run: sudo rm -rf /var/lib/minikube/build/build.2724657291
I0317 10:36:08.929466   46189 ssh_runner.go:195] Run: sudo rm -f /var/lib/minikube/build/build.2724657291.tar
I0317 10:36:08.938466   46189 build_images.go:217] Built localhost/my-image:functional-703463 from /tmp/build.2724657291.tar
I0317 10:36:08.938499   46189 build_images.go:133] succeeded building to: functional-703463
I0317 10:36:08.938504   46189 build_images.go:134] failed building to: 
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageBuild (3.63s)
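Note: the buildkit trace above implies a three-step Dockerfile under testdata/build (FROM gcr.io/k8s-minikube/busybox:latest, RUN true, ADD content.txt /). As a minimal sketch, the build and a follow-up check can be reproduced by hand against the same profile; the grep filter is an assumed convenience, not part of the test:

	out/minikube-linux-arm64 -p functional-703463 image build -t localhost/my-image:functional-703463 testdata/build
	out/minikube-linux-arm64 -p functional-703463 image ls | grep my-image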

TestFunctional/parallel/ImageCommands/Setup (0.82s)
=== RUN   TestFunctional/parallel/ImageCommands/Setup
functional_test.go:359: (dbg) Run:  docker pull kicbase/echo-server:1.0
functional_test.go:364: (dbg) Run:  docker tag kicbase/echo-server:1.0 kicbase/echo-server:functional-703463
--- PASS: TestFunctional/parallel/ImageCommands/Setup (0.82s)

TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:129: (dbg) daemon: [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.00s)

TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (9.48s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup
functional_test_tunnel_test.go:212: (dbg) Run:  kubectl --context functional-703463 apply -f testdata/testsvc.yaml
functional_test_tunnel_test.go:216: (dbg) TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup: waiting 4m0s for pods matching "run=nginx-svc" in namespace "default" ...
helpers_test.go:344: "nginx-svc" [48cca0a9-6cc9-46c2-9c0c-b8a73cf0e80f] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx-svc" [48cca0a9-6cc9-46c2-9c0c-b8a73cf0e80f] Running
functional_test_tunnel_test.go:216: (dbg) TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup: run=nginx-svc healthy within 9.003800016s
I0317 10:35:28.744871    7572 kapi.go:150] Service nginx-svc in namespace default found.
--- PASS: TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (9.48s)

TestFunctional/parallel/ImageCommands/ImageLoadDaemon (1.39s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadDaemon
functional_test.go:372: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image load --daemon kicbase/echo-server:functional-703463 --alsologtostderr
functional_test.go:372: (dbg) Done: out/minikube-linux-arm64 -p functional-703463 image load --daemon kicbase/echo-server:functional-703463 --alsologtostderr: (1.127728807s)
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadDaemon (1.39s)

TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.27s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageReloadDaemon
functional_test.go:382: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image load --daemon kicbase/echo-server:functional-703463 --alsologtostderr
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageReloadDaemon (1.27s)

TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.36s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon
functional_test.go:252: (dbg) Run:  docker pull kicbase/echo-server:latest
functional_test.go:257: (dbg) Run:  docker tag kicbase/echo-server:latest kicbase/echo-server:functional-703463
functional_test.go:262: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image load --daemon kicbase/echo-server:functional-703463 --alsologtostderr
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (1.36s)

TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.34s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveToFile
functional_test.go:397: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image save kicbase/echo-server:functional-703463 /home/jenkins/workspace/Docker_Linux_containerd_arm64/echo-server-save.tar --alsologtostderr
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveToFile (0.34s)

TestFunctional/parallel/ImageCommands/ImageRemove (0.47s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageRemove
functional_test.go:409: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image rm kicbase/echo-server:functional-703463 --alsologtostderr
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageRemove (0.47s)

TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.7s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadFromFile
functional_test.go:426: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image load /home/jenkins/workspace/Docker_Linux_containerd_arm64/echo-server-save.tar --alsologtostderr
functional_test.go:468: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image ls
--- PASS: TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.70s)

TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.41s)
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveDaemon
functional_test.go:436: (dbg) Run:  docker rmi kicbase/echo-server:functional-703463
functional_test.go:441: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 image save --daemon kicbase/echo-server:functional-703463 --alsologtostderr
functional_test.go:449: (dbg) Run:  docker image inspect kicbase/echo-server:functional-703463
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveDaemon (0.41s)
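Note: taken together, ImageSaveToFile, ImageRemove, and ImageLoadFromFile exercise a save, remove, then reload round-trip of the same image. A minimal sketch of that flow, with /tmp/echo-server.tar as an assumed stand-in for the Jenkins workspace path used above:

	out/minikube-linux-arm64 -p functional-703463 image save kicbase/echo-server:functional-703463 /tmp/echo-server.tar
	out/minikube-linux-arm64 -p functional-703463 image rm kicbase/echo-server:functional-703463
	out/minikube-linux-arm64 -p functional-703463 image load /tmp/echo-server.tar
	out/minikube-linux-arm64 -p functional-703463 image ls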

TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.08s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP
functional_test_tunnel_test.go:234: (dbg) Run:  kubectl --context functional-703463 get svc nginx-svc -o jsonpath={.status.loadBalancer.ingress[0].ip}
--- PASS: TestFunctional/parallel/TunnelCmd/serial/WaitService/IngressIP (0.08s)

TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:299: tunnel at http://10.103.88.128 is working!
--- PASS: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:434: (dbg) stopping [out/minikube-linux-arm64 -p functional-703463 tunnel --alsologtostderr] ...
functional_test_tunnel_test.go:437: failed to stop process: signal: terminated
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)
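Note: the TunnelCmd subtests follow a start, wait, resolve, access, delete sequence. A condensed sketch of the same flow; the nginx-svc name comes from this run, while the curl and kill steps are assumed manual equivalents of what the harness does, and the ingress IP (10.103.88.128 here) differs per run:

	out/minikube-linux-arm64 -p functional-703463 tunnel &
	kubectl --context functional-703463 get svc nginx-svc -o jsonpath='{.status.loadBalancer.ingress[0].ip}'
	curl http://10.103.88.128
	kill %1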

TestFunctional/parallel/ServiceCmd/DeployApp (7.25s)
=== RUN   TestFunctional/parallel/ServiceCmd/DeployApp
functional_test.go:1454: (dbg) Run:  kubectl --context functional-703463 create deployment hello-node --image=registry.k8s.io/echoserver-arm:1.8
functional_test.go:1462: (dbg) Run:  kubectl --context functional-703463 expose deployment hello-node --type=NodePort --port=8080
functional_test.go:1467: (dbg) TestFunctional/parallel/ServiceCmd/DeployApp: waiting 10m0s for pods matching "app=hello-node" in namespace "default" ...
helpers_test.go:344: "hello-node-64fc58db8c-gnn9h" [96174eec-14b1-40ed-bb4d-cea220ac9020] Pending / Ready:ContainersNotReady (containers with unready status: [echoserver-arm]) / ContainersReady:ContainersNotReady (containers with unready status: [echoserver-arm])
helpers_test.go:344: "hello-node-64fc58db8c-gnn9h" [96174eec-14b1-40ed-bb4d-cea220ac9020] Running
functional_test.go:1467: (dbg) TestFunctional/parallel/ServiceCmd/DeployApp: app=hello-node healthy within 7.003652516s
--- PASS: TestFunctional/parallel/ServiceCmd/DeployApp (7.25s)

TestFunctional/parallel/ServiceCmd/List (0.52s)
=== RUN   TestFunctional/parallel/ServiceCmd/List
functional_test.go:1476: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service list
--- PASS: TestFunctional/parallel/ServiceCmd/List (0.52s)

TestFunctional/parallel/ServiceCmd/JSONOutput (0.5s)
=== RUN   TestFunctional/parallel/ServiceCmd/JSONOutput
functional_test.go:1506: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service list -o json
functional_test.go:1511: Took "501.08396ms" to run "out/minikube-linux-arm64 -p functional-703463 service list -o json"
--- PASS: TestFunctional/parallel/ServiceCmd/JSONOutput (0.50s)

TestFunctional/parallel/ServiceCmd/HTTPS (0.38s)
=== RUN   TestFunctional/parallel/ServiceCmd/HTTPS
functional_test.go:1526: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service --namespace=default --https --url hello-node
functional_test.go:1539: found endpoint: https://192.168.49.2:30927
--- PASS: TestFunctional/parallel/ServiceCmd/HTTPS (0.38s)

TestFunctional/parallel/ServiceCmd/Format (0.36s)
=== RUN   TestFunctional/parallel/ServiceCmd/Format
functional_test.go:1557: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service hello-node --url --format={{.IP}}
--- PASS: TestFunctional/parallel/ServiceCmd/Format (0.36s)

TestFunctional/parallel/ServiceCmd/URL (0.37s)
=== RUN   TestFunctional/parallel/ServiceCmd/URL
functional_test.go:1576: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 service hello-node --url
functional_test.go:1582: found endpoint for hello-node: http://192.168.49.2:30927
--- PASS: TestFunctional/parallel/ServiceCmd/URL (0.37s)

TestFunctional/parallel/UpdateContextCmd/no_changes (0.16s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes

=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:2136: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (0.16s)

TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.14s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster

=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:2136: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (0.14s)

TestFunctional/parallel/UpdateContextCmd/no_clusters (0.15s)
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters

=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:2136: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 update-context --alsologtostderr -v=2
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (0.15s)

TestFunctional/parallel/ProfileCmd/profile_not_create (0.48s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1287: (dbg) Run:  out/minikube-linux-arm64 profile lis
functional_test.go:1292: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (0.48s)

TestFunctional/parallel/ProfileCmd/profile_list (0.43s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1327: (dbg) Run:  out/minikube-linux-arm64 profile list
functional_test.go:1332: Took "366.710668ms" to run "out/minikube-linux-arm64 profile list"
functional_test.go:1341: (dbg) Run:  out/minikube-linux-arm64 profile list -l
functional_test.go:1346: Took "60.465169ms" to run "out/minikube-linux-arm64 profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (0.43s)

TestFunctional/parallel/ProfileCmd/profile_json_output (0.41s)
=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1378: (dbg) Run:  out/minikube-linux-arm64 profile list -o json
functional_test.go:1383: Took "350.901366ms" to run "out/minikube-linux-arm64 profile list -o json"
functional_test.go:1391: (dbg) Run:  out/minikube-linux-arm64 profile list -o json --light
functional_test.go:1396: Took "59.019448ms" to run "out/minikube-linux-arm64 profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (0.41s)

TestFunctional/parallel/MountCmd/any-port (8.44s)
=== RUN   TestFunctional/parallel/MountCmd/any-port
functional_test_mount_test.go:73: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdany-port1136669113/001:/mount-9p --alsologtostderr -v=1]
functional_test_mount_test.go:107: wrote "test-1742207750993542468" to /tmp/TestFunctionalparallelMountCmdany-port1136669113/001/created-by-test
functional_test_mount_test.go:107: wrote "test-1742207750993542468" to /tmp/TestFunctionalparallelMountCmdany-port1136669113/001/created-by-test-removed-by-pod
functional_test_mount_test.go:107: wrote "test-1742207750993542468" to /tmp/TestFunctionalparallelMountCmdany-port1136669113/001/test-1742207750993542468
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:115: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (448.738813ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
I0317 10:35:51.443367    7572 retry.go:31] will retry after 525.306794ms: exit status 1
functional_test_mount_test.go:115: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:129: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh -- ls -la /mount-9p
functional_test_mount_test.go:133: guest mount directory contents
total 2
-rw-r--r-- 1 docker docker 24 Mar 17 10:35 created-by-test
-rw-r--r-- 1 docker docker 24 Mar 17 10:35 created-by-test-removed-by-pod
-rw-r--r-- 1 docker docker 24 Mar 17 10:35 test-1742207750993542468
functional_test_mount_test.go:137: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh cat /mount-9p/test-1742207750993542468
functional_test_mount_test.go:148: (dbg) Run:  kubectl --context functional-703463 replace --force -f testdata/busybox-mount-test.yaml
functional_test_mount_test.go:153: (dbg) TestFunctional/parallel/MountCmd/any-port: waiting 4m0s for pods matching "integration-test=busybox-mount" in namespace "default" ...
helpers_test.go:344: "busybox-mount" [479d02aa-4660-4040-b807-9887b4cdf3f5] Pending
helpers_test.go:344: "busybox-mount" [479d02aa-4660-4040-b807-9887b4cdf3f5] Pending / Ready:ContainersNotReady (containers with unready status: [mount-munger]) / ContainersReady:ContainersNotReady (containers with unready status: [mount-munger])
helpers_test.go:344: "busybox-mount" [479d02aa-4660-4040-b807-9887b4cdf3f5] Pending / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
helpers_test.go:344: "busybox-mount" [479d02aa-4660-4040-b807-9887b4cdf3f5] Succeeded / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
functional_test_mount_test.go:153: (dbg) TestFunctional/parallel/MountCmd/any-port: integration-test=busybox-mount healthy within 5.004564231s
functional_test_mount_test.go:169: (dbg) Run:  kubectl --context functional-703463 logs busybox-mount
functional_test_mount_test.go:181: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh stat /mount-9p/created-by-test
functional_test_mount_test.go:181: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh stat /mount-9p/created-by-pod
functional_test_mount_test.go:90: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:94: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdany-port1136669113/001:/mount-9p --alsologtostderr -v=1] ...
--- PASS: TestFunctional/parallel/MountCmd/any-port (8.44s)
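Note: a minimal sketch of the 9p mount flow this test exercises, with /tmp/demo-mount as an assumed host directory in place of the per-test temp dir:

	out/minikube-linux-arm64 mount -p functional-703463 /tmp/demo-mount:/mount-9p &
	out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p"
	out/minikube-linux-arm64 -p functional-703463 ssh -- ls -la /mount-9p
	out/minikube-linux-arm64 -p functional-703463 ssh "sudo umount -f /mount-9p"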

TestFunctional/parallel/MountCmd/specific-port (2.3s)
=== RUN   TestFunctional/parallel/MountCmd/specific-port
functional_test_mount_test.go:213: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdspecific-port3304410237/001:/mount-9p --alsologtostderr -v=1 --port 46464]
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:243: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p": exit status 1 (616.643569ms)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
I0317 10:36:00.047356    7572 retry.go:31] will retry after 398.259356ms: exit status 1
functional_test_mount_test.go:243: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T /mount-9p | grep 9p"
functional_test_mount_test.go:257: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh -- ls -la /mount-9p
functional_test_mount_test.go:261: guest mount directory contents
total 0
functional_test_mount_test.go:263: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdspecific-port3304410237/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
functional_test_mount_test.go:264: reading mount text
functional_test_mount_test.go:278: done reading mount text
functional_test_mount_test.go:230: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "sudo umount -f /mount-9p"
functional_test_mount_test.go:230: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "sudo umount -f /mount-9p": exit status 1 (361.272164ms)

-- stdout --
	umount: /mount-9p: not mounted.

-- /stdout --
** stderr ** 
	ssh: Process exited with status 32

** /stderr **
functional_test_mount_test.go:232: "out/minikube-linux-arm64 -p functional-703463 ssh \"sudo umount -f /mount-9p\"": exit status 1
functional_test_mount_test.go:234: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdspecific-port3304410237/001:/mount-9p --alsologtostderr -v=1 --port 46464] ...
--- PASS: TestFunctional/parallel/MountCmd/specific-port (2.30s)

TestFunctional/parallel/MountCmd/VerifyCleanup (2.69s)
=== RUN   TestFunctional/parallel/MountCmd/VerifyCleanup
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount1 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount2 --alsologtostderr -v=1]
functional_test_mount_test.go:298: (dbg) daemon: [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount3 --alsologtostderr -v=1]
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Non-zero exit: out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T" /mount1: exit status 1 (1.186944273s)

** stderr ** 
	ssh: Process exited with status 1

** /stderr **
I0317 10:36:02.921626    7572 retry.go:31] will retry after 626.159513ms: exit status 1
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T" /mount1
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T" /mount2
functional_test_mount_test.go:325: (dbg) Run:  out/minikube-linux-arm64 -p functional-703463 ssh "findmnt -T" /mount3
functional_test_mount_test.go:370: (dbg) Run:  out/minikube-linux-arm64 mount -p functional-703463 --kill=true
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount1 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount2 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_mount_test.go:313: (dbg) stopping [out/minikube-linux-arm64 mount -p functional-703463 /tmp/TestFunctionalparallelMountCmdVerifyCleanup3171206327/001:/mount3 --alsologtostderr -v=1] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
--- PASS: TestFunctional/parallel/MountCmd/VerifyCleanup (2.69s)

TestFunctional/delete_echo-server_images (0.04s)
=== RUN   TestFunctional/delete_echo-server_images
functional_test.go:207: (dbg) Run:  docker rmi -f kicbase/echo-server:1.0
functional_test.go:207: (dbg) Run:  docker rmi -f kicbase/echo-server:functional-703463
--- PASS: TestFunctional/delete_echo-server_images (0.04s)

TestFunctional/delete_my-image_image (0.02s)
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:215: (dbg) Run:  docker rmi -f localhost/my-image:functional-703463
--- PASS: TestFunctional/delete_my-image_image (0.02s)

TestFunctional/delete_minikube_cached_images (0.02s)
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:223: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-703463
--- PASS: TestFunctional/delete_minikube_cached_images (0.02s)

TestMultiControlPlane/serial/StartCluster (117.58s)
=== RUN   TestMultiControlPlane/serial/StartCluster
ha_test.go:101: (dbg) Run:  out/minikube-linux-arm64 start -p ha-922188 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
E0317 10:37:01.104556    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:101: (dbg) Done: out/minikube-linux-arm64 start -p ha-922188 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m56.763307388s)
ha_test.go:107: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
--- PASS: TestMultiControlPlane/serial/StartCluster (117.58s)

TestMultiControlPlane/serial/DeployApp (34.9s)
=== RUN   TestMultiControlPlane/serial/DeployApp
ha_test.go:128: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- apply -f ./testdata/ha/ha-pod-dns-test.yaml
ha_test.go:133: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- rollout status deployment/busybox
ha_test.go:133: (dbg) Done: out/minikube-linux-arm64 kubectl -p ha-922188 -- rollout status deployment/busybox: (31.803948097s)
ha_test.go:140: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- get pods -o jsonpath='{.items[*].status.podIP}'
ha_test.go:163: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ggnsg -- nslookup kubernetes.io
ha_test.go:171: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-p2txx -- nslookup kubernetes.io
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ggnsg -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-p2txx -- nslookup kubernetes.default
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ggnsg -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-p2txx -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiControlPlane/serial/DeployApp (34.90s)

TestMultiControlPlane/serial/PingHostFromPods (1.74s)
=== RUN   TestMultiControlPlane/serial/PingHostFromPods
ha_test.go:199: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ggnsg -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ggnsg -- sh -c "ping -c 1 192.168.49.1"
ha_test.go:207: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-p2txx -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-p2txx -- sh -c "ping -c 1 192.168.49.1"
--- PASS: TestMultiControlPlane/serial/PingHostFromPods (1.74s)
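Note: each pod resolves host.minikube.internal and pings the gateway; the awk 'NR==5' | cut pipeline pulls the third space-separated field of nslookup's fifth output line, which the test expects to hold the resolved address. One pod's pair of checks, exactly as run above:

	out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
	out/minikube-linux-arm64 kubectl -p ha-922188 -- exec busybox-58667487b6-ftfx2 -- sh -c "ping -c 1 192.168.49.1"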

TestMultiControlPlane/serial/AddWorkerNode (21.45s)
=== RUN   TestMultiControlPlane/serial/AddWorkerNode
ha_test.go:228: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-922188 -v=7 --alsologtostderr
ha_test.go:228: (dbg) Done: out/minikube-linux-arm64 node add -p ha-922188 -v=7 --alsologtostderr: (20.457002936s)
ha_test.go:234: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
--- PASS: TestMultiControlPlane/serial/AddWorkerNode (21.45s)

TestMultiControlPlane/serial/NodeLabels (0.11s)
=== RUN   TestMultiControlPlane/serial/NodeLabels
ha_test.go:255: (dbg) Run:  kubectl --context ha-922188 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiControlPlane/serial/NodeLabels (0.11s)

TestMultiControlPlane/serial/HAppyAfterClusterStart (0.97s)
=== RUN   TestMultiControlPlane/serial/HAppyAfterClusterStart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/HAppyAfterClusterStart (0.97s)

TestMultiControlPlane/serial/CopyFile (19.08s)
=== RUN   TestMultiControlPlane/serial/CopyFile
ha_test.go:328: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status --output json -v=7 --alsologtostderr
ha_test.go:328: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 status --output json -v=7 --alsologtostderr: (1.035769359s)
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp testdata/cp-test.txt ha-922188:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile1666047113/001/cp-test_ha-922188.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188:/home/docker/cp-test.txt ha-922188-m02:/home/docker/cp-test_ha-922188_ha-922188-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test_ha-922188_ha-922188-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188:/home/docker/cp-test.txt ha-922188-m03:/home/docker/cp-test_ha-922188_ha-922188-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test_ha-922188_ha-922188-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188:/home/docker/cp-test.txt ha-922188-m04:/home/docker/cp-test_ha-922188_ha-922188-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test_ha-922188_ha-922188-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp testdata/cp-test.txt ha-922188-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m02:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile1666047113/001/cp-test_ha-922188-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m02:/home/docker/cp-test.txt ha-922188:/home/docker/cp-test_ha-922188-m02_ha-922188.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test_ha-922188-m02_ha-922188.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m02:/home/docker/cp-test.txt ha-922188-m03:/home/docker/cp-test_ha-922188-m02_ha-922188-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test.txt"
E0317 10:39:17.244670    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test_ha-922188-m02_ha-922188-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m02:/home/docker/cp-test.txt ha-922188-m04:/home/docker/cp-test_ha-922188-m02_ha-922188-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test_ha-922188-m02_ha-922188-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp testdata/cp-test.txt ha-922188-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m03:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile1666047113/001/cp-test_ha-922188-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m03:/home/docker/cp-test.txt ha-922188:/home/docker/cp-test_ha-922188-m03_ha-922188.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test_ha-922188-m03_ha-922188.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m03:/home/docker/cp-test.txt ha-922188-m02:/home/docker/cp-test_ha-922188-m03_ha-922188-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test_ha-922188-m03_ha-922188-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m03:/home/docker/cp-test.txt ha-922188-m04:/home/docker/cp-test_ha-922188-m03_ha-922188-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test_ha-922188-m03_ha-922188-m04.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp testdata/cp-test.txt ha-922188-m04:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m04:/home/docker/cp-test.txt /tmp/TestMultiControlPlaneserialCopyFile1666047113/001/cp-test_ha-922188-m04.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m04:/home/docker/cp-test.txt ha-922188:/home/docker/cp-test_ha-922188-m04_ha-922188.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188 "sudo cat /home/docker/cp-test_ha-922188-m04_ha-922188.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m04:/home/docker/cp-test.txt ha-922188-m02:/home/docker/cp-test_ha-922188-m04_ha-922188-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test_ha-922188-m04_ha-922188-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 cp ha-922188-m04:/home/docker/cp-test.txt ha-922188-m03:/home/docker/cp-test_ha-922188-m04_ha-922188-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m03 "sudo cat /home/docker/cp-test_ha-922188-m04_ha-922188-m03.txt"
--- PASS: TestMultiControlPlane/serial/CopyFile (19.08s)
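Note: the CopyFile matrix copies the same file host-to-node and then node-to-node for every ordered pair of the four machines, verifying each copy with sudo cat over ssh. One representative pair, as a sketch drawn from the commands above:

	out/minikube-linux-arm64 -p ha-922188 cp testdata/cp-test.txt ha-922188:/home/docker/cp-test.txt
	out/minikube-linux-arm64 -p ha-922188 cp ha-922188:/home/docker/cp-test.txt ha-922188-m02:/home/docker/cp-test_ha-922188_ha-922188-m02.txt
	out/minikube-linux-arm64 -p ha-922188 ssh -n ha-922188-m02 "sudo cat /home/docker/cp-test_ha-922188_ha-922188-m02.txt"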

TestMultiControlPlane/serial/StopSecondaryNode (12.79s)
=== RUN   TestMultiControlPlane/serial/StopSecondaryNode
ha_test.go:365: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 node stop m02 -v=7 --alsologtostderr
ha_test.go:365: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 node stop m02 -v=7 --alsologtostderr: (12.039506944s)
ha_test.go:371: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:371: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr: exit status 7 (750.809975ms)

-- stdout --
	ha-922188
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-922188-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-922188-m03
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	ha-922188-m04
	type: Worker
	host: Running
	kubelet: Running
	

-- /stdout --
** stderr ** 
	I0317 10:39:39.944501   62793 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:39:39.944761   62793 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:39:39.944786   62793 out.go:358] Setting ErrFile to fd 2...
	I0317 10:39:39.944806   62793 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:39:39.945123   62793 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:39:39.945397   62793 out.go:352] Setting JSON to false
	I0317 10:39:39.945449   62793 mustload.go:65] Loading cluster: ha-922188
	I0317 10:39:39.945894   62793 config.go:182] Loaded profile config "ha-922188": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:39:39.945934   62793 status.go:174] checking status of ha-922188 ...
	I0317 10:39:39.946640   62793 cli_runner.go:164] Run: docker container inspect ha-922188 --format={{.State.Status}}
	I0317 10:39:39.946809   62793 notify.go:220] Checking for updates...
	I0317 10:39:39.967987   62793 status.go:371] ha-922188 host status = "Running" (err=<nil>)
	I0317 10:39:39.968016   62793 host.go:66] Checking if "ha-922188" exists ...
	I0317 10:39:39.968320   62793 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-922188
	I0317 10:39:39.991944   62793 host.go:66] Checking if "ha-922188" exists ...
	I0317 10:39:39.992257   62793 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 10:39:39.992300   62793 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-922188
	I0317 10:39:40.029603   62793 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32788 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/ha-922188/id_rsa Username:docker}
	I0317 10:39:40.122958   62793 ssh_runner.go:195] Run: systemctl --version
	I0317 10:39:40.127889   62793 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 10:39:40.144068   62793 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 10:39:40.208650   62793 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:4 ContainersRunning:3 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:69 OomKillDisable:true NGoroutines:72 SystemTime:2025-03-17 10:39:40.197868164 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 10:39:40.209466   62793 kubeconfig.go:125] found "ha-922188" server: "https://192.168.49.254:8443"
	I0317 10:39:40.209571   62793 api_server.go:166] Checking apiserver status ...
	I0317 10:39:40.209617   62793 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0317 10:39:40.222970   62793 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1533/cgroup
	I0317 10:39:40.233423   62793 api_server.go:182] apiserver freezer: "11:freezer:/docker/d7153c94b8c2498f59ffecbaf0940d9747c75e6a19b41949eef0c648eb1fa0a1/kubepods/burstable/pode9772f71059ffea07938dd2e55398c02/23841c3a7b87128206a0a036109b483603c537df57b7c590b6c76e068d394c03"
	I0317 10:39:40.233504   62793 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/d7153c94b8c2498f59ffecbaf0940d9747c75e6a19b41949eef0c648eb1fa0a1/kubepods/burstable/pode9772f71059ffea07938dd2e55398c02/23841c3a7b87128206a0a036109b483603c537df57b7c590b6c76e068d394c03/freezer.state
	I0317 10:39:40.245843   62793 api_server.go:204] freezer state: "THAWED"
	I0317 10:39:40.245881   62793 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0317 10:39:40.254102   62793 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0317 10:39:40.254133   62793 status.go:463] ha-922188 apiserver status = Running (err=<nil>)
	I0317 10:39:40.254144   62793 status.go:176] ha-922188 status: &{Name:ha-922188 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 10:39:40.254161   62793 status.go:174] checking status of ha-922188-m02 ...
	I0317 10:39:40.254472   62793 cli_runner.go:164] Run: docker container inspect ha-922188-m02 --format={{.State.Status}}
	I0317 10:39:40.284002   62793 status.go:371] ha-922188-m02 host status = "Stopped" (err=<nil>)
	I0317 10:39:40.284024   62793 status.go:384] host is not running, skipping remaining checks
	I0317 10:39:40.284031   62793 status.go:176] ha-922188-m02 status: &{Name:ha-922188-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 10:39:40.284051   62793 status.go:174] checking status of ha-922188-m03 ...
	I0317 10:39:40.284363   62793 cli_runner.go:164] Run: docker container inspect ha-922188-m03 --format={{.State.Status}}
	I0317 10:39:40.302182   62793 status.go:371] ha-922188-m03 host status = "Running" (err=<nil>)
	I0317 10:39:40.302203   62793 host.go:66] Checking if "ha-922188-m03" exists ...
	I0317 10:39:40.302635   62793 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-922188-m03
	I0317 10:39:40.320401   62793 host.go:66] Checking if "ha-922188-m03" exists ...
	I0317 10:39:40.320728   62793 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 10:39:40.320777   62793 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-922188-m03
	I0317 10:39:40.339140   62793 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32798 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/ha-922188-m03/id_rsa Username:docker}
	I0317 10:39:40.432220   62793 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 10:39:40.445073   62793 kubeconfig.go:125] found "ha-922188" server: "https://192.168.49.254:8443"
	I0317 10:39:40.445104   62793 api_server.go:166] Checking apiserver status ...
	I0317 10:39:40.445147   62793 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0317 10:39:40.457178   62793 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1370/cgroup
	I0317 10:39:40.466819   62793 api_server.go:182] apiserver freezer: "11:freezer:/docker/e33ad978a63925994ce4574694cfcd3ace03678a0639e96be0785ab3c692cc49/kubepods/burstable/pod206f20d2ad1736df1541c7fada426209/a9d272b238e7e78c0203d0e8b5737f95849cd258df1b88e1196abb2382eeb5d6"
	I0317 10:39:40.466946   62793 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/e33ad978a63925994ce4574694cfcd3ace03678a0639e96be0785ab3c692cc49/kubepods/burstable/pod206f20d2ad1736df1541c7fada426209/a9d272b238e7e78c0203d0e8b5737f95849cd258df1b88e1196abb2382eeb5d6/freezer.state
	I0317 10:39:40.476227   62793 api_server.go:204] freezer state: "THAWED"
	I0317 10:39:40.476255   62793 api_server.go:253] Checking apiserver healthz at https://192.168.49.254:8443/healthz ...
	I0317 10:39:40.484380   62793 api_server.go:279] https://192.168.49.254:8443/healthz returned 200:
	ok
	I0317 10:39:40.484425   62793 status.go:463] ha-922188-m03 apiserver status = Running (err=<nil>)
	I0317 10:39:40.484435   62793 status.go:176] ha-922188-m03 status: &{Name:ha-922188-m03 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 10:39:40.484455   62793 status.go:174] checking status of ha-922188-m04 ...
	I0317 10:39:40.484761   62793 cli_runner.go:164] Run: docker container inspect ha-922188-m04 --format={{.State.Status}}
	I0317 10:39:40.502951   62793 status.go:371] ha-922188-m04 host status = "Running" (err=<nil>)
	I0317 10:39:40.502979   62793 host.go:66] Checking if "ha-922188-m04" exists ...
	I0317 10:39:40.503273   62793 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" ha-922188-m04
	I0317 10:39:40.521110   62793 host.go:66] Checking if "ha-922188-m04" exists ...
	I0317 10:39:40.521424   62793 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 10:39:40.521480   62793 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" ha-922188-m04
	I0317 10:39:40.539923   62793 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32803 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/ha-922188-m04/id_rsa Username:docker}
	I0317 10:39:40.627897   62793 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 10:39:40.639950   62793 status.go:176] ha-922188-m04 status: &{Name:ha-922188-m04 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiControlPlane/serial/StopSecondaryNode (12.79s)
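
The status trace above shows how a control-plane node is probed: find the kube-apiserver process with pgrep, confirm its freezer cgroup is THAWED (running rather than paused), then hit /healthz on the load-balanced endpoint. Below is a minimal standalone sketch of that sequence; it only approximates what status.go logs here and is not minikube's actual implementation (the real code runs pgrep via sudo and also handles cgroup v2).

// apiserver_probe.go - a sketch of the probe sequence visible in the log:
// pgrep -> /proc/<pid>/cgroup -> freezer.state -> GET /healthz.
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"os"
	"os/exec"
	"strings"
	"time"
)

func main() {
	// 1. Newest kube-apiserver process, as `sudo pgrep -xnf` does in the log
	//    (run as root, like the test does).
	out, err := exec.Command("pgrep", "-xnf", "kube-apiserver.*minikube.*").Output()
	if err != nil {
		fmt.Fprintln(os.Stderr, "apiserver process not found:", err)
		os.Exit(1)
	}
	pid := strings.TrimSpace(string(out))

	// 2. Resolve the PID's freezer cgroup and read freezer.state; "THAWED"
	//    means the apiserver is running rather than frozen.
	data, err := os.ReadFile("/proc/" + pid + "/cgroup")
	if err != nil {
		fmt.Fprintln(os.Stderr, "cannot read cgroup:", err)
		os.Exit(1)
	}
	for _, line := range strings.Split(string(data), "\n") {
		parts := strings.SplitN(line, ":", 3)
		if len(parts) == 3 && parts[1] == "freezer" {
			if state, err := os.ReadFile("/sys/fs/cgroup/freezer" + parts[2] + "/freezer.state"); err == nil {
				fmt.Println("freezer state:", strings.TrimSpace(string(state)))
			}
		}
	}

	// 3. Probe /healthz on the HA endpoint; the serving certificate is
	//    self-signed, so verification is skipped for this check only.
	client := &http.Client{
		Timeout:   5 * time.Second,
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get("https://192.168.49.254:8443/healthz")
	if err != nil {
		fmt.Fprintln(os.Stderr, "healthz unreachable:", err)
		os.Exit(1)
	}
	defer resp.Body.Close()
	fmt.Println("healthz:", resp.Status)
}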

TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.77s)

=== RUN   TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop
ha_test.go:392: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterControlPlaneNodeStop (0.77s)

TestMultiControlPlane/serial/RestartSecondaryNode (17.81s)

=== RUN   TestMultiControlPlane/serial/RestartSecondaryNode
ha_test.go:422: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 node start m02 -v=7 --alsologtostderr
E0317 10:39:44.947561    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:422: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 node start m02 -v=7 --alsologtostderr: (16.735623341s)
ha_test.go:430: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:450: (dbg) Run:  kubectl get nodes
--- PASS: TestMultiControlPlane/serial/RestartSecondaryNode (17.81s)

TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (1.59s)

=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
ha_test.go:281: (dbg) Done: out/minikube-linux-arm64 profile list --output json: (1.587927009s)
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeRestart (1.59s)

TestMultiControlPlane/serial/RestartClusterKeepsNodes (135.43s)

=== RUN   TestMultiControlPlane/serial/RestartClusterKeepsNodes
ha_test.go:458: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-922188 -v=7 --alsologtostderr
ha_test.go:464: (dbg) Run:  out/minikube-linux-arm64 stop -p ha-922188 -v=7 --alsologtostderr
E0317 10:40:19.263064    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.269782    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.281164    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.302482    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.343835    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.425214    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.586651    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:19.908065    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:20.549711    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:21.831021    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:24.393604    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:40:29.514995    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:464: (dbg) Done: out/minikube-linux-arm64 stop -p ha-922188 -v=7 --alsologtostderr: (37.301584314s)
ha_test.go:469: (dbg) Run:  out/minikube-linux-arm64 start -p ha-922188 --wait=true -v=7 --alsologtostderr
E0317 10:40:39.756926    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:41:00.239013    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:41:41.200250    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:469: (dbg) Done: out/minikube-linux-arm64 start -p ha-922188 --wait=true -v=7 --alsologtostderr: (1m37.937276227s)
ha_test.go:474: (dbg) Run:  out/minikube-linux-arm64 node list -p ha-922188
--- PASS: TestMultiControlPlane/serial/RestartClusterKeepsNodes (135.43s)

TestMultiControlPlane/serial/DeleteSecondaryNode (10.95s)

=== RUN   TestMultiControlPlane/serial/DeleteSecondaryNode
ha_test.go:489: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 node delete m03 -v=7 --alsologtostderr
ha_test.go:489: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 node delete m03 -v=7 --alsologtostderr: (10.049733642s)
ha_test.go:495: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:513: (dbg) Run:  kubectl get nodes
ha_test.go:521: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiControlPlane/serial/DeleteSecondaryNode (10.95s)
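
The go-template passed to kubectl above walks every node's status.conditions and prints the status of the "Ready" condition, one per line, so the test can assert that each remaining node reports True. The sketch below evaluates the same template with Go's text/template; the Node and Condition structs are minimal stand-ins, not the real Kubernetes API types (kubectl's template uses the lowercase JSON paths .items/.status, while Go structs need exported fields).

// ready_template.go - evaluates the Ready-condition template from the test
// above against a hand-built two-node list.
package main

import (
	"os"
	"text/template"
)

type Condition struct {
	Type   string
	Status string
}

type Node struct {
	Status struct{ Conditions []Condition }
}

func main() {
	const tmpl = `{{range .Items}}{{range .Status.Conditions}}{{if eq .Type "Ready"}} {{.Status}}{{"\n"}}{{end}}{{end}}{{end}}`

	var list struct{ Items []Node }
	for _, status := range []string{"True", "True"} { // two Ready nodes after the delete
		var n Node
		n.Status.Conditions = []Condition{{Type: "Ready", Status: status}}
		list.Items = append(list.Items, n)
	}
	// Prints " True" once per Ready node, which is what the test greps for.
	template.Must(template.New("ready").Parse(tmpl)).Execute(os.Stdout, list)
}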

TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.72s)

=== RUN   TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete
ha_test.go:392: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterSecondaryNodeDelete (0.72s)

TestMultiControlPlane/serial/StopCluster (35.95s)

=== RUN   TestMultiControlPlane/serial/StopCluster
ha_test.go:533: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 stop -v=7 --alsologtostderr
E0317 10:43:03.125022    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:533: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 stop -v=7 --alsologtostderr: (35.834055271s)
ha_test.go:539: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:539: (dbg) Non-zero exit: out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr: exit status 7 (111.67661ms)

-- stdout --
	ha-922188
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-922188-m02
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	ha-922188-m04
	type: Worker
	host: Stopped
	kubelet: Stopped
	

-- /stdout --
** stderr ** 
	I0317 10:43:03.801612   77521 out.go:345] Setting OutFile to fd 1 ...
	I0317 10:43:03.801758   77521 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:43:03.801784   77521 out.go:358] Setting ErrFile to fd 2...
	I0317 10:43:03.801803   77521 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 10:43:03.802075   77521 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 10:43:03.802293   77521 out.go:352] Setting JSON to false
	I0317 10:43:03.802340   77521 mustload.go:65] Loading cluster: ha-922188
	I0317 10:43:03.802388   77521 notify.go:220] Checking for updates...
	I0317 10:43:03.802799   77521 config.go:182] Loaded profile config "ha-922188": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 10:43:03.802822   77521 status.go:174] checking status of ha-922188 ...
	I0317 10:43:03.803428   77521 cli_runner.go:164] Run: docker container inspect ha-922188 --format={{.State.Status}}
	I0317 10:43:03.822579   77521 status.go:371] ha-922188 host status = "Stopped" (err=<nil>)
	I0317 10:43:03.822603   77521 status.go:384] host is not running, skipping remaining checks
	I0317 10:43:03.822610   77521 status.go:176] ha-922188 status: &{Name:ha-922188 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 10:43:03.822658   77521 status.go:174] checking status of ha-922188-m02 ...
	I0317 10:43:03.823038   77521 cli_runner.go:164] Run: docker container inspect ha-922188-m02 --format={{.State.Status}}
	I0317 10:43:03.840282   77521 status.go:371] ha-922188-m02 host status = "Stopped" (err=<nil>)
	I0317 10:43:03.840304   77521 status.go:384] host is not running, skipping remaining checks
	I0317 10:43:03.840310   77521 status.go:176] ha-922188-m02 status: &{Name:ha-922188-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 10:43:03.840332   77521 status.go:174] checking status of ha-922188-m04 ...
	I0317 10:43:03.840619   77521 cli_runner.go:164] Run: docker container inspect ha-922188-m04 --format={{.State.Status}}
	I0317 10:43:03.864376   77521 status.go:371] ha-922188-m04 host status = "Stopped" (err=<nil>)
	I0317 10:43:03.864397   77521 status.go:384] host is not running, skipping remaining checks
	I0317 10:43:03.864404   77521 status.go:176] ha-922188-m04 status: &{Name:ha-922188-m04 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

** /stderr **
--- PASS: TestMultiControlPlane/serial/StopCluster (35.95s)
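
Each block of the status output above comes from one `docker container inspect` call: minikube reads {{.State.Status}} for the node's container and maps it onto the Host/Kubelet/APIServer rows, skipping the remaining checks when the host is not running. A simplified sketch follows, assuming the docker CLI is on PATH; minikube's status.go handles more states and drivers than this.

// host_status.go - a sketch of the per-node host check the stopped-cluster
// status above loops through.
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

func hostStatus(node string) string {
	out, err := exec.Command("docker", "container", "inspect",
		node, "--format", "{{.State.Status}}").Output()
	if err != nil {
		return "Nonexistent" // inspect fails once the container is deleted
	}
	switch state := strings.TrimSpace(string(out)); state {
	case "running":
		return "Running"
	case "exited", "created":
		return "Stopped"
	default:
		return state
	}
}

func main() {
	for _, node := range []string{"ha-922188", "ha-922188-m02", "ha-922188-m04"} {
		fmt.Printf("%s\thost: %s\n", node, hostStatus(node))
	}
}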

TestMultiControlPlane/serial/RestartCluster (73.5s)

=== RUN   TestMultiControlPlane/serial/RestartCluster
ha_test.go:562: (dbg) Run:  out/minikube-linux-arm64 start -p ha-922188 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd
ha_test.go:562: (dbg) Done: out/minikube-linux-arm64 start -p ha-922188 --wait=true -v=7 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m12.563978225s)
ha_test.go:568: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:586: (dbg) Run:  kubectl get nodes
E0317 10:44:17.243820    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
ha_test.go:594: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiControlPlane/serial/RestartCluster (73.50s)

TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.7s)

=== RUN   TestMultiControlPlane/serial/DegradedAfterClusterRestart
ha_test.go:392: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiControlPlane/serial/DegradedAfterClusterRestart (0.70s)

TestMultiControlPlane/serial/AddSecondaryNode (44.62s)

=== RUN   TestMultiControlPlane/serial/AddSecondaryNode
ha_test.go:607: (dbg) Run:  out/minikube-linux-arm64 node add -p ha-922188 --control-plane -v=7 --alsologtostderr
ha_test.go:607: (dbg) Done: out/minikube-linux-arm64 node add -p ha-922188 --control-plane -v=7 --alsologtostderr: (43.566752987s)
ha_test.go:613: (dbg) Run:  out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr
ha_test.go:613: (dbg) Done: out/minikube-linux-arm64 -p ha-922188 status -v=7 --alsologtostderr: (1.052914436s)
--- PASS: TestMultiControlPlane/serial/AddSecondaryNode (44.62s)

TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (1s)

=== RUN   TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd
ha_test.go:281: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
ha_test.go:281: (dbg) Done: out/minikube-linux-arm64 profile list --output json: (1.003242987s)
--- PASS: TestMultiControlPlane/serial/HAppyAfterSecondaryNodeAdd (1.00s)

TestJSONOutput/start/Command (68.85s)

=== RUN   TestJSONOutput/start/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-747868 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd
E0317 10:45:19.263015    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
E0317 10:45:46.967058    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 start -p json-output-747868 --output=json --user=testUser --memory=2200 --wait=true --driver=docker  --container-runtime=containerd: (1m8.845571802s)
--- PASS: TestJSONOutput/start/Command (68.85s)

TestJSONOutput/start/Audit (0s)

=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps

=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps

=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/pause/Command (0.74s)

=== RUN   TestJSONOutput/pause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 pause -p json-output-747868 --output=json --user=testUser
--- PASS: TestJSONOutput/pause/Command (0.74s)

TestJSONOutput/pause/Audit (0s)

=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps

=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps

=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/unpause/Command (0.65s)

=== RUN   TestJSONOutput/unpause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 unpause -p json-output-747868 --output=json --user=testUser
--- PASS: TestJSONOutput/unpause/Command (0.65s)

TestJSONOutput/unpause/Audit (0s)

=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps

=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps

=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

TestJSONOutput/stop/Command (5.79s)

=== RUN   TestJSONOutput/stop/Command
json_output_test.go:63: (dbg) Run:  out/minikube-linux-arm64 stop -p json-output-747868 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-linux-arm64 stop -p json-output-747868 --output=json --user=testUser: (5.793949804s)
--- PASS: TestJSONOutput/stop/Command (5.79s)

TestJSONOutput/stop/Audit (0s)

=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)

=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps

=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)

=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps

=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

TestErrorJSONOutput (0.25s)

=== RUN   TestErrorJSONOutput
json_output_test.go:160: (dbg) Run:  out/minikube-linux-arm64 start -p json-output-error-702255 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:160: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p json-output-error-702255 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (96.96296ms)

-- stdout --
	{"specversion":"1.0","id":"9e9e0bec-0e71-4516-ac82-6a9875384d3a","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[json-output-error-702255] minikube v1.35.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"2c8c7276-62d7-4174-87a2-1e885a259a72","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=20535"}}
	{"specversion":"1.0","id":"264ea66c-4db3-498d-a218-5b692ccf7016","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"1fd4f574-f5ff-4c1f-a597-38dab168cfe9","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig"}}
	{"specversion":"1.0","id":"228bc0f1-3f6e-40da-bd07-c484b4a0ff4f","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube"}}
	{"specversion":"1.0","id":"acb93b14-d765-4012-a5a1-ead2d793b82e","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"a7980f1b-fb04-43c7-9ee9-ab7e3a7da005","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"0d97bd61-f96a-446e-b86a-2b3307edac77","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on linux/arm64","name":"DRV_UNSUPPORTED_OS","url":""}}

-- /stdout --
helpers_test.go:175: Cleaning up "json-output-error-702255" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p json-output-error-702255
--- PASS: TestErrorJSONOutput (0.25s)
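
Every line of the --output=json stream above is a CloudEvents-style JSON object whose type distinguishes setup steps, info messages, and errors; the final io.k8s.sigs.minikube.error event carries the exit code (56) and the DRV_UNSUPPORTED_OS message the test asserts on. Below is a minimal consumer sketch with field names taken from the log; it is illustrative, not an official client.

// event_decode.go - reads minikube's --output=json stream line by line,
// e.g. `minikube start --output=json ... | event_decode`.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

type minikubeEvent struct {
	SpecVersion string            `json:"specversion"`
	ID          string            `json:"id"`
	Type        string            `json:"type"`
	Data        map[string]string `json:"data"`
}

func main() {
	sc := bufio.NewScanner(os.Stdin)
	for sc.Scan() {
		var ev minikubeEvent
		if err := json.Unmarshal(sc.Bytes(), &ev); err != nil {
			continue // skip any non-JSON noise on the stream
		}
		switch ev.Type {
		case "io.k8s.sigs.minikube.error":
			fmt.Printf("error (exit %s): %s\n", ev.Data["exitcode"], ev.Data["message"])
		case "io.k8s.sigs.minikube.step":
			fmt.Printf("step %s/%s: %s\n", ev.Data["currentstep"], ev.Data["totalsteps"], ev.Data["message"])
		default:
			fmt.Println(ev.Data["message"])
		}
	}
}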

TestKicCustomNetwork/create_custom_network (38.64s)

=== RUN   TestKicCustomNetwork/create_custom_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-177664 --network=
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-177664 --network=: (36.423713621s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-177664" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-177664
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-177664: (2.184091843s)
--- PASS: TestKicCustomNetwork/create_custom_network (38.64s)

TestKicCustomNetwork/use_default_bridge_network (35.29s)

=== RUN   TestKicCustomNetwork/use_default_bridge_network
kic_custom_network_test.go:57: (dbg) Run:  out/minikube-linux-arm64 start -p docker-network-237706 --network=bridge
kic_custom_network_test.go:57: (dbg) Done: out/minikube-linux-arm64 start -p docker-network-237706 --network=bridge: (33.29543556s)
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
helpers_test.go:175: Cleaning up "docker-network-237706" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p docker-network-237706
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p docker-network-237706: (1.978069656s)
--- PASS: TestKicCustomNetwork/use_default_bridge_network (35.29s)

TestKicExistingNetwork (32.71s)

=== RUN   TestKicExistingNetwork
I0317 10:47:47.520509    7572 cli_runner.go:164] Run: docker network inspect existing-network --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
W0317 10:47:47.536948    7572 cli_runner.go:211] docker network inspect existing-network --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}" returned with exit code 1
I0317 10:47:47.537024    7572 network_create.go:284] running [docker network inspect existing-network] to gather additional debugging logs...
I0317 10:47:47.537042    7572 cli_runner.go:164] Run: docker network inspect existing-network
W0317 10:47:47.552000    7572 cli_runner.go:211] docker network inspect existing-network returned with exit code 1
I0317 10:47:47.552033    7572 network_create.go:287] error running [docker network inspect existing-network]: docker network inspect existing-network: exit status 1
stdout:
[]

stderr:
Error response from daemon: network existing-network not found
I0317 10:47:47.552046    7572 network_create.go:289] output of [docker network inspect existing-network]: -- stdout --
[]

-- /stdout --
** stderr ** 
Error response from daemon: network existing-network not found

** /stderr **
I0317 10:47:47.552236    7572 cli_runner.go:164] Run: docker network inspect bridge --format "{"Name": "{{.Name}}","Driver": "{{.Driver}}","Subnet": "{{range .IPAM.Config}}{{.Subnet}}{{end}}","Gateway": "{{range .IPAM.Config}}{{.Gateway}}{{end}}","MTU": {{if (index .Options "com.docker.network.driver.mtu")}}{{(index .Options "com.docker.network.driver.mtu")}}{{else}}0{{end}}, "ContainerIPs": [{{range $k,$v := .Containers }}"{{$v.IPv4Address}}",{{end}}]}"
I0317 10:47:47.570055    7572 network.go:211] skipping subnet 192.168.49.0/24 that is taken: &{IP:192.168.49.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.49.0/24 Gateway:192.168.49.1 ClientMin:192.168.49.2 ClientMax:192.168.49.254 Broadcast:192.168.49.255 IsPrivate:true Interface:{IfaceName:br-e774881651be IfaceIPv4:192.168.49.1 IfaceMTU:1500 IfaceMAC:12:df:c7:61:5e:f1} reservation:<nil>}
I0317 10:47:47.571095    7572 network.go:206] using free private subnet 192.168.58.0/24: &{IP:192.168.58.0 Netmask:255.255.255.0 Prefix:24 CIDR:192.168.58.0/24 Gateway:192.168.58.1 ClientMin:192.168.58.2 ClientMax:192.168.58.254 Broadcast:192.168.58.255 IsPrivate:true Interface:{IfaceName: IfaceIPv4: IfaceMTU:0 IfaceMAC:} reservation:0x400055fc40}
I0317 10:47:47.571120    7572 network_create.go:124] attempt to create docker network existing-network 192.168.58.0/24 with gateway 192.168.58.1 and MTU of 1500 ...
I0317 10:47:47.571169    7572 cli_runner.go:164] Run: docker network create --driver=bridge --subnet=192.168.58.0/24 --gateway=192.168.58.1 -o --ip-masq -o --icc -o com.docker.network.driver.mtu=1500 --label=created_by.minikube.sigs.k8s.io=true --label=name.minikube.sigs.k8s.io=existing-network existing-network
I0317 10:47:47.643219    7572 network_create.go:108] docker network existing-network 192.168.58.0/24 created
kic_custom_network_test.go:150: (dbg) Run:  docker network ls --format {{.Name}}
kic_custom_network_test.go:93: (dbg) Run:  out/minikube-linux-arm64 start -p existing-network-241637 --network=existing-network
kic_custom_network_test.go:93: (dbg) Done: out/minikube-linux-arm64 start -p existing-network-241637 --network=existing-network: (30.52965068s)
helpers_test.go:175: Cleaning up "existing-network-241637" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p existing-network-241637
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p existing-network-241637: (2.021687734s)
I0317 10:48:20.212776    7572 cli_runner.go:164] Run: docker network ls --filter=label=existing-network --format {{.Name}}
--- PASS: TestKicExistingNetwork (32.71s)
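
The network_create lines above show the subnet scan: 192.168.49.0/24 is skipped because the existing minikube bridge already owns it, and the next candidate, 192.168.58.0/24, is used to create the network. A toy sketch of that scan follows; the step of 9 between candidates (.49, .58, .67, ...) matches the addresses seen in this report but is an assumption here, and the real logic in minikube's network package is more involved.

// subnet_pick.go - picks the first candidate /24 that does not clash with
// a subnet already in use, mirroring the scan in the log above.
package main

import (
	"fmt"
	"net"
)

func pickFreeSubnet(taken []*net.IPNet) *net.IPNet {
	for third := 49; third < 255; third += 9 { // assumed candidate step
		_, candidate, _ := net.ParseCIDR(fmt.Sprintf("192.168.%d.0/24", third))
		clash := false
		for _, t := range taken {
			if t.Contains(candidate.IP) || candidate.Contains(t.IP) {
				clash = true
				break
			}
		}
		if !clash {
			return candidate
		}
	}
	return nil
}

func main() {
	_, inUse, _ := net.ParseCIDR("192.168.49.0/24") // the existing minikube bridge
	// Prints 192.168.58.0/24, the subnet chosen in the log.
	fmt.Println("free subnet:", pickFreeSubnet([]*net.IPNet{inUse}))
}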

TestKicCustomSubnet (36.42s)

=== RUN   TestKicCustomSubnet
kic_custom_network_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p custom-subnet-173515 --subnet=192.168.60.0/24
kic_custom_network_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p custom-subnet-173515 --subnet=192.168.60.0/24: (34.245176891s)
kic_custom_network_test.go:161: (dbg) Run:  docker network inspect custom-subnet-173515 --format "{{(index .IPAM.Config 0).Subnet}}"
helpers_test.go:175: Cleaning up "custom-subnet-173515" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p custom-subnet-173515
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p custom-subnet-173515: (2.1553035s)
--- PASS: TestKicCustomSubnet (36.42s)

TestKicStaticIP (33.32s)

=== RUN   TestKicStaticIP
kic_custom_network_test.go:132: (dbg) Run:  out/minikube-linux-arm64 start -p static-ip-740865 --static-ip=192.168.200.200
E0317 10:49:17.245345    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
kic_custom_network_test.go:132: (dbg) Done: out/minikube-linux-arm64 start -p static-ip-740865 --static-ip=192.168.200.200: (31.416262314s)
kic_custom_network_test.go:138: (dbg) Run:  out/minikube-linux-arm64 -p static-ip-740865 ip
helpers_test.go:175: Cleaning up "static-ip-740865" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p static-ip-740865
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p static-ip-740865: (1.745658436s)
--- PASS: TestKicStaticIP (33.32s)

TestMainNoArgs (0.07s)

=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-linux-arm64
--- PASS: TestMainNoArgs (0.07s)

TestMinikubeProfile (67.76s)

=== RUN   TestMinikubeProfile
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p first-844690 --driver=docker  --container-runtime=containerd
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p first-844690 --driver=docker  --container-runtime=containerd: (28.446401963s)
minikube_profile_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p second-847890 --driver=docker  --container-runtime=containerd
E0317 10:50:19.263193    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
minikube_profile_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p second-847890 --driver=docker  --container-runtime=containerd: (33.646137026s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile first-844690
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
minikube_profile_test.go:51: (dbg) Run:  out/minikube-linux-arm64 profile second-847890
minikube_profile_test.go:55: (dbg) Run:  out/minikube-linux-arm64 profile list -ojson
helpers_test.go:175: Cleaning up "second-847890" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p second-847890
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p second-847890: (1.998926633s)
helpers_test.go:175: Cleaning up "first-844690" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p first-844690
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p first-844690: (2.269740438s)
--- PASS: TestMinikubeProfile (67.76s)

TestMountStart/serial/StartWithMountFirst (5.95s)

=== RUN   TestMountStart/serial/StartWithMountFirst
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-1-407958 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
E0317 10:50:40.309078    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-1-407958 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (4.953960846s)
--- PASS: TestMountStart/serial/StartWithMountFirst (5.95s)

TestMountStart/serial/VerifyMountFirst (0.26s)

=== RUN   TestMountStart/serial/VerifyMountFirst
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-1-407958 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountFirst (0.26s)

TestMountStart/serial/StartWithMountSecond (6.76s)

=== RUN   TestMountStart/serial/StartWithMountSecond
mount_start_test.go:98: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-409903 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd
mount_start_test.go:98: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-409903 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=docker  --container-runtime=containerd: (5.76251919s)
--- PASS: TestMountStart/serial/StartWithMountSecond (6.76s)

TestMountStart/serial/VerifyMountSecond (0.25s)

=== RUN   TestMountStart/serial/VerifyMountSecond
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-409903 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountSecond (0.25s)

TestMountStart/serial/DeleteFirst (1.61s)

=== RUN   TestMountStart/serial/DeleteFirst
pause_test.go:132: (dbg) Run:  out/minikube-linux-arm64 delete -p mount-start-1-407958 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-linux-arm64 delete -p mount-start-1-407958 --alsologtostderr -v=5: (1.609577536s)
--- PASS: TestMountStart/serial/DeleteFirst (1.61s)

TestMountStart/serial/VerifyMountPostDelete (0.25s)

=== RUN   TestMountStart/serial/VerifyMountPostDelete
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-409903 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostDelete (0.25s)

TestMountStart/serial/Stop (1.2s)

=== RUN   TestMountStart/serial/Stop
mount_start_test.go:155: (dbg) Run:  out/minikube-linux-arm64 stop -p mount-start-2-409903
mount_start_test.go:155: (dbg) Done: out/minikube-linux-arm64 stop -p mount-start-2-409903: (1.197557777s)
--- PASS: TestMountStart/serial/Stop (1.20s)

TestMountStart/serial/RestartStopped (7.17s)

=== RUN   TestMountStart/serial/RestartStopped
mount_start_test.go:166: (dbg) Run:  out/minikube-linux-arm64 start -p mount-start-2-409903
mount_start_test.go:166: (dbg) Done: out/minikube-linux-arm64 start -p mount-start-2-409903: (6.171328753s)
--- PASS: TestMountStart/serial/RestartStopped (7.17s)

TestMountStart/serial/VerifyMountPostStop (0.28s)

=== RUN   TestMountStart/serial/VerifyMountPostStop
mount_start_test.go:114: (dbg) Run:  out/minikube-linux-arm64 -p mount-start-2-409903 ssh -- ls /minikube-host
--- PASS: TestMountStart/serial/VerifyMountPostStop (0.28s)

TestMultiNode/serial/FreshStart2Nodes (63.95s)

=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:96: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-286863 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:96: (dbg) Done: out/minikube-linux-arm64 start -p multinode-286863 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (1m3.450829755s)
multinode_test.go:102: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
--- PASS: TestMultiNode/serial/FreshStart2Nodes (63.95s)

TestMultiNode/serial/PingHostFrom2Pods (1.06s)

=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:564: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-6q5tk -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-6q5tk -- sh -c "ping -c 1 192.168.67.1"
multinode_test.go:572: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-kn9lf -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-linux-arm64 kubectl -p multinode-286863 -- exec busybox-58667487b6-kn9lf -- sh -c "ping -c 1 192.168.67.1"
--- PASS: TestMultiNode/serial/PingHostFrom2Pods (1.06s)
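
The pipeline in the exec commands above, nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3, takes the fifth line of busybox nslookup output and extracts its third field, which is the resolved host gateway address (192.168.67.1 here, the address each pod then pings). The sketch below reproduces that extraction against a hard-coded sample, since busybox nslookup formatting varies by version; the sample output is illustrative, not captured from this run.

// hostip_parse.go - mirrors the awk/cut extraction used by the test.
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Illustrative busybox nslookup output; line 5 holds the answer record.
	sample := `Server:    10.96.0.10
Address:   10.96.0.10:53

Name:      host.minikube.internal
Address: 1 192.168.67.1
`
	lines := strings.Split(sample, "\n")
	if len(lines) >= 5 {
		fields := strings.Fields(lines[4]) // awk 'NR==5'
		if len(fields) >= 3 {
			fmt.Println("host IP:", fields[2]) // cut -d' ' -f3
		}
	}
}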

TestMultiNode/serial/AddNode (17.43s)

=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:121: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-286863 -v 3 --alsologtostderr
multinode_test.go:121: (dbg) Done: out/minikube-linux-arm64 node add -p multinode-286863 -v 3 --alsologtostderr: (16.771902244s)
multinode_test.go:127: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
--- PASS: TestMultiNode/serial/AddNode (17.43s)

TestMultiNode/serial/MultiNodeLabels (0.1s)

=== RUN   TestMultiNode/serial/MultiNodeLabels
multinode_test.go:221: (dbg) Run:  kubectl --context multinode-286863 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiNode/serial/MultiNodeLabels (0.10s)

TestMultiNode/serial/ProfileList (0.69s)

=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:143: (dbg) Run:  out/minikube-linux-arm64 profile list --output json
--- PASS: TestMultiNode/serial/ProfileList (0.69s)

TestMultiNode/serial/CopyFile (9.98s)

=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:184: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --output json --alsologtostderr
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp testdata/cp-test.txt multinode-286863:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2187354379/001/cp-test_multinode-286863.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863:/home/docker/cp-test.txt multinode-286863-m02:/home/docker/cp-test_multinode-286863_multinode-286863-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test_multinode-286863_multinode-286863-m02.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863:/home/docker/cp-test.txt multinode-286863-m03:/home/docker/cp-test_multinode-286863_multinode-286863-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test_multinode-286863_multinode-286863-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp testdata/cp-test.txt multinode-286863-m02:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m02:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2187354379/001/cp-test_multinode-286863-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m02:/home/docker/cp-test.txt multinode-286863:/home/docker/cp-test_multinode-286863-m02_multinode-286863.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test_multinode-286863-m02_multinode-286863.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m02:/home/docker/cp-test.txt multinode-286863-m03:/home/docker/cp-test_multinode-286863-m02_multinode-286863-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test_multinode-286863-m02_multinode-286863-m03.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp testdata/cp-test.txt multinode-286863-m03:/home/docker/cp-test.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m03:/home/docker/cp-test.txt /tmp/TestMultiNodeserialCopyFile2187354379/001/cp-test_multinode-286863-m03.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m03:/home/docker/cp-test.txt multinode-286863:/home/docker/cp-test_multinode-286863-m03_multinode-286863.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863 "sudo cat /home/docker/cp-test_multinode-286863-m03_multinode-286863.txt"
helpers_test.go:556: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 cp multinode-286863-m03:/home/docker/cp-test.txt multinode-286863-m02:/home/docker/cp-test_multinode-286863-m03_multinode-286863-m02.txt
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 ssh -n multinode-286863-m02 "sudo cat /home/docker/cp-test_multinode-286863-m03_multinode-286863-m02.txt"
--- PASS: TestMultiNode/serial/CopyFile (9.98s)

TestMultiNode/serial/StopNode (2.25s)
=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:248: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 node stop m03
multinode_test.go:248: (dbg) Done: out/minikube-linux-arm64 -p multinode-286863 node stop m03: (1.220783915s)
multinode_test.go:254: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status
multinode_test.go:254: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-286863 status: exit status 7 (532.819781ms)
-- stdout --
	multinode-286863
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-286863-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-286863-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
multinode_test.go:261: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
E0317 11:04:17.244756    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:261: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr: exit status 7 (497.186981ms)
-- stdout --
	multinode-286863
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-286863-m02
	type: Worker
	host: Running
	kubelet: Running
	
	multinode-286863-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
** stderr ** 
	I0317 11:04:17.118795  133697 out.go:345] Setting OutFile to fd 1 ...
	I0317 11:04:17.119566  133697 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:04:17.119607  133697 out.go:358] Setting ErrFile to fd 2...
	I0317 11:04:17.119627  133697 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:04:17.119928  133697 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 11:04:17.120201  133697 out.go:352] Setting JSON to false
	I0317 11:04:17.120270  133697 mustload.go:65] Loading cluster: multinode-286863
	I0317 11:04:17.120305  133697 notify.go:220] Checking for updates...
	I0317 11:04:17.120749  133697 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 11:04:17.120789  133697 status.go:174] checking status of multinode-286863 ...
	I0317 11:04:17.121466  133697 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 11:04:17.140020  133697 status.go:371] multinode-286863 host status = "Running" (err=<nil>)
	I0317 11:04:17.140051  133697 host.go:66] Checking if "multinode-286863" exists ...
	I0317 11:04:17.140337  133697 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863
	I0317 11:04:17.161115  133697 host.go:66] Checking if "multinode-286863" exists ...
	I0317 11:04:17.161405  133697 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 11:04:17.161455  133697 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863
	I0317 11:04:17.184230  133697 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32908 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863/id_rsa Username:docker}
	I0317 11:04:17.271835  133697 ssh_runner.go:195] Run: systemctl --version
	I0317 11:04:17.276106  133697 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 11:04:17.287729  133697 cli_runner.go:164] Run: docker system info --format "{{json .}}"
	I0317 11:04:17.344032  133697 info.go:266] docker info: {ID:6ZPO:QZND:VNGE:LUKL:4Y3K:XELL:AAX4:2GTK:E6LM:MPRN:3ZXR:TTMR Containers:3 ContainersRunning:2 ContainersPaused:0 ContainersStopped:1 Images:3 Driver:overlay2 DriverStatus:[[Backing Filesystem extfs] [Supports d_type true] [Using metacopy false] [Native Overlay Diff true] [userxattr false]] SystemStatus:<nil> Plugins:{Volume:[local] Network:[bridge host ipvlan macvlan null overlay] Authorization:<nil> Log:[awslogs fluentd gcplogs gelf journald json-file local splunk syslog]} MemoryLimit:true SwapLimit:true KernelMemory:false KernelMemoryTCP:true CPUCfsPeriod:true CPUCfsQuota:true CPUShares:true CPUSet:true PidsLimit:true IPv4Forwarding:true BridgeNfIptables:false BridgeNfIP6Tables:false Debug:false NFd:49 OomKillDisable:true NGoroutines:62 SystemTime:2025-03-17 11:04:17.335096166 +0000 UTC LoggingDriver:json-file CgroupDriver:cgroupfs NEventsListener:0 KernelVersion:5.15.0-1077-aws OperatingSystem:Ubuntu 20.04.6 LTS OSType:linux Architecture:aarch64 IndexServerAddress:https://index.docker.io/v1/ RegistryConfig:{AllowNondistributableArtifactsCIDRs:[] AllowNondistributableArtifactsHostnames:[] InsecureRegistryCIDRs:[::1/128 127.0.0.0/8] IndexConfigs:{DockerIo:{Name:docker.io Mirrors:[] Secure:true Official:true}} Mirrors:[]} NCPU:2 MemTotal:8214831104 GenericResources:<nil> DockerRootDir:/var/lib/docker HTTPProxy: HTTPSProxy: NoProxy: Name:ip-172-31-30-239 Labels:[] ExperimentalBuild:false ServerVersion:28.0.1 ClusterStore: ClusterAdvertise: Runtimes:{Runc:{Path:runc}} DefaultRuntime:runc Swarm:{NodeID: NodeAddr: LocalNodeState:inactive ControlAvailable:false Error: RemoteManagers:<nil>} LiveRestoreEnabled:false Isolation: InitBinary:docker-init ContainerdCommit:{ID:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb Expected:bcc810d6b9066471b0b6fa75f557a15a1cbf31bb} RuncCommit:{ID:v1.2.4-0-g6c52b3f Expected:v1.2.4-0-g6c52b3f} InitCommit:{ID:de40ad0 Expected:de40ad0} SecurityOptions:[name=apparmor name=seccomp,profile=builtin] ProductLicense: Warnings:<nil> ServerErrors:[] ClientInfo:{Debug:false Plugins:[map[Name:buildx Path:/usr/libexec/docker/cli-plugins/docker-buildx SchemaVersion:0.1.0 ShortDescription:Docker Buildx Vendor:Docker Inc. Version:v0.21.1] map[Name:compose Path:/usr/libexec/docker/cli-plugins/docker-compose SchemaVersion:0.1.0 ShortDescription:Docker Compose Vendor:Docker Inc. Version:v2.33.1]] Warnings:<nil>}}
	I0317 11:04:17.344664  133697 kubeconfig.go:125] found "multinode-286863" server: "https://192.168.67.2:8443"
	I0317 11:04:17.344695  133697 api_server.go:166] Checking apiserver status ...
	I0317 11:04:17.344755  133697 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0317 11:04:17.356237  133697 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/1436/cgroup
	I0317 11:04:17.366192  133697 api_server.go:182] apiserver freezer: "11:freezer:/docker/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/kubepods/burstable/pod51e85de8b77310a74a991f28c0c98e7e/0224d523c6c15863a6dbd8bf493c53872bfaa4fd93a2f9bf76cd68b27e00fa20"
	I0317 11:04:17.366261  133697 ssh_runner.go:195] Run: sudo cat /sys/fs/cgroup/freezer/docker/012f3f8578a85374248c4036ba45f78049054406dc573bbb349cc08cb1d1738f/kubepods/burstable/pod51e85de8b77310a74a991f28c0c98e7e/0224d523c6c15863a6dbd8bf493c53872bfaa4fd93a2f9bf76cd68b27e00fa20/freezer.state
	I0317 11:04:17.375628  133697 api_server.go:204] freezer state: "THAWED"
	I0317 11:04:17.375721  133697 api_server.go:253] Checking apiserver healthz at https://192.168.67.2:8443/healthz ...
	I0317 11:04:17.383600  133697 api_server.go:279] https://192.168.67.2:8443/healthz returned 200:
	ok
	I0317 11:04:17.383632  133697 status.go:463] multinode-286863 apiserver status = Running (err=<nil>)
	I0317 11:04:17.383643  133697 status.go:176] multinode-286863 status: &{Name:multinode-286863 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 11:04:17.383660  133697 status.go:174] checking status of multinode-286863-m02 ...
	I0317 11:04:17.383967  133697 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Status}}
	I0317 11:04:17.401971  133697 status.go:371] multinode-286863-m02 host status = "Running" (err=<nil>)
	I0317 11:04:17.402007  133697 host.go:66] Checking if "multinode-286863-m02" exists ...
	I0317 11:04:17.402320  133697 cli_runner.go:164] Run: docker container inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}},{{.GlobalIPv6Address}}{{end}}" multinode-286863-m02
	I0317 11:04:17.421144  133697 host.go:66] Checking if "multinode-286863-m02" exists ...
	I0317 11:04:17.421498  133697 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0317 11:04:17.421553  133697 cli_runner.go:164] Run: docker container inspect -f "'{{(index (index .NetworkSettings.Ports "22/tcp") 0).HostPort}}'" multinode-286863-m02
	I0317 11:04:17.441221  133697 sshutil.go:53] new ssh client: &{IP:127.0.0.1 Port:32913 SSHKeyPath:/home/jenkins/minikube-integration/20535-2262/.minikube/machines/multinode-286863-m02/id_rsa Username:docker}
	I0317 11:04:17.528179  133697 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0317 11:04:17.539724  133697 status.go:176] multinode-286863-m02 status: &{Name:multinode-286863-m02 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0317 11:04:17.539761  133697 status.go:174] checking status of multinode-286863-m03 ...
	I0317 11:04:17.540085  133697 cli_runner.go:164] Run: docker container inspect multinode-286863-m03 --format={{.State.Status}}
	I0317 11:04:17.560200  133697 status.go:371] multinode-286863-m03 host status = "Stopped" (err=<nil>)
	I0317 11:04:17.560224  133697 status.go:384] host is not running, skipping remaining checks
	I0317 11:04:17.560245  133697 status.go:176] multinode-286863-m03 status: &{Name:multinode-286863-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}
** /stderr **
--- PASS: TestMultiNode/serial/StopNode (2.25s)

TestMultiNode/serial/StartAfterStop (9.34s)
=== RUN   TestMultiNode/serial/StartAfterStop
multinode_test.go:282: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 node start m03 -v=7 --alsologtostderr
multinode_test.go:282: (dbg) Done: out/minikube-linux-arm64 -p multinode-286863 node start m03 -v=7 --alsologtostderr: (8.598246632s)
multinode_test.go:290: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status -v=7 --alsologtostderr
multinode_test.go:306: (dbg) Run:  kubectl get nodes
--- PASS: TestMultiNode/serial/StartAfterStop (9.34s)

TestMultiNode/serial/RestartKeepsNodes (79.51s)
=== RUN   TestMultiNode/serial/RestartKeepsNodes
multinode_test.go:314: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-286863
multinode_test.go:321: (dbg) Run:  out/minikube-linux-arm64 stop -p multinode-286863
multinode_test.go:321: (dbg) Done: out/minikube-linux-arm64 stop -p multinode-286863: (24.981503593s)
multinode_test.go:326: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-286863 --wait=true -v=8 --alsologtostderr
E0317 11:05:19.262473    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:326: (dbg) Done: out/minikube-linux-arm64 start -p multinode-286863 --wait=true -v=8 --alsologtostderr: (54.398360089s)
multinode_test.go:331: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-286863
--- PASS: TestMultiNode/serial/RestartKeepsNodes (79.51s)

TestMultiNode/serial/DeleteNode (5.25s)
=== RUN   TestMultiNode/serial/DeleteNode
multinode_test.go:416: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 node delete m03
multinode_test.go:416: (dbg) Done: out/minikube-linux-arm64 -p multinode-286863 node delete m03: (4.586767282s)
multinode_test.go:422: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
multinode_test.go:436: (dbg) Run:  kubectl get nodes
multinode_test.go:444: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/DeleteNode (5.25s)

TestMultiNode/serial/StopMultiNode (23.99s)
=== RUN   TestMultiNode/serial/StopMultiNode
multinode_test.go:345: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 stop
multinode_test.go:345: (dbg) Done: out/minikube-linux-arm64 -p multinode-286863 stop: (23.791620467s)
multinode_test.go:351: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status
multinode_test.go:351: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-286863 status: exit status 7 (100.762376ms)
-- stdout --
	multinode-286863
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-286863-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
multinode_test.go:358: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
multinode_test.go:358: (dbg) Non-zero exit: out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr: exit status 7 (92.176194ms)
-- stdout --
	multinode-286863
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
	multinode-286863-m02
	type: Worker
	host: Stopped
	kubelet: Stopped
	
-- /stdout --
** stderr ** 
	I0317 11:06:15.602304  141791 out.go:345] Setting OutFile to fd 1 ...
	I0317 11:06:15.602535  141791 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:06:15.602562  141791 out.go:358] Setting ErrFile to fd 2...
	I0317 11:06:15.602581  141791 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0317 11:06:15.602997  141791 root.go:338] Updating PATH: /home/jenkins/minikube-integration/20535-2262/.minikube/bin
	I0317 11:06:15.603305  141791 out.go:352] Setting JSON to false
	I0317 11:06:15.603357  141791 mustload.go:65] Loading cluster: multinode-286863
	I0317 11:06:15.604074  141791 config.go:182] Loaded profile config "multinode-286863": Driver=docker, ContainerRuntime=containerd, KubernetesVersion=v1.32.2
	I0317 11:06:15.604120  141791 status.go:174] checking status of multinode-286863 ...
	I0317 11:06:15.604819  141791 cli_runner.go:164] Run: docker container inspect multinode-286863 --format={{.State.Status}}
	I0317 11:06:15.606445  141791 notify.go:220] Checking for updates...
	I0317 11:06:15.623408  141791 status.go:371] multinode-286863 host status = "Stopped" (err=<nil>)
	I0317 11:06:15.623430  141791 status.go:384] host is not running, skipping remaining checks
	I0317 11:06:15.623437  141791 status.go:176] multinode-286863 status: &{Name:multinode-286863 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0317 11:06:15.623469  141791 status.go:174] checking status of multinode-286863-m02 ...
	I0317 11:06:15.623766  141791 cli_runner.go:164] Run: docker container inspect multinode-286863-m02 --format={{.State.Status}}
	I0317 11:06:15.644623  141791 status.go:371] multinode-286863-m02 host status = "Stopped" (err=<nil>)
	I0317 11:06:15.644647  141791 status.go:384] host is not running, skipping remaining checks
	I0317 11:06:15.644655  141791 status.go:176] multinode-286863-m02 status: &{Name:multinode-286863-m02 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}
** /stderr **
--- PASS: TestMultiNode/serial/StopMultiNode (23.99s)

TestMultiNode/serial/RestartMultiNode (49.66s)
=== RUN   TestMultiNode/serial/RestartMultiNode
multinode_test.go:376: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-286863 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd
multinode_test.go:376: (dbg) Done: out/minikube-linux-arm64 start -p multinode-286863 --wait=true -v=8 --alsologtostderr --driver=docker  --container-runtime=containerd: (49.002056474s)
multinode_test.go:382: (dbg) Run:  out/minikube-linux-arm64 -p multinode-286863 status --alsologtostderr
multinode_test.go:396: (dbg) Run:  kubectl get nodes
multinode_test.go:404: (dbg) Run:  kubectl get nodes -o "go-template='{{range .items}}{{range .status.conditions}}{{if eq .type "Ready"}} {{.status}}{{"\n"}}{{end}}{{end}}{{end}}'"
--- PASS: TestMultiNode/serial/RestartMultiNode (49.66s)

TestMultiNode/serial/ValidateNameConflict (33.15s)
=== RUN   TestMultiNode/serial/ValidateNameConflict
multinode_test.go:455: (dbg) Run:  out/minikube-linux-arm64 node list -p multinode-286863
multinode_test.go:464: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-286863-m02 --driver=docker  --container-runtime=containerd
multinode_test.go:464: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p multinode-286863-m02 --driver=docker  --container-runtime=containerd: exit status 14 (94.741087ms)
-- stdout --
	* [multinode-286863-m02] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	
-- /stdout --
** stderr ** 
	! Profile name 'multinode-286863-m02' is duplicated with machine name 'multinode-286863-m02' in profile 'multinode-286863'
	X Exiting due to MK_USAGE: Profile name should be unique
** /stderr **
multinode_test.go:472: (dbg) Run:  out/minikube-linux-arm64 start -p multinode-286863-m03 --driver=docker  --container-runtime=containerd
E0317 11:07:20.310998    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
multinode_test.go:472: (dbg) Done: out/minikube-linux-arm64 start -p multinode-286863-m03 --driver=docker  --container-runtime=containerd: (30.628452214s)
multinode_test.go:479: (dbg) Run:  out/minikube-linux-arm64 node add -p multinode-286863
multinode_test.go:479: (dbg) Non-zero exit: out/minikube-linux-arm64 node add -p multinode-286863: exit status 80 (437.694105ms)
-- stdout --
	* Adding node m03 to cluster multinode-286863 as [worker]
	
	
-- /stdout --
** stderr ** 
	X Exiting due to GUEST_NODE_ADD: failed to add node: Node multinode-286863-m03 already exists in multinode-286863-m03 profile
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│    * Please also attach the following file to the GitHub issue:                             │
	│    * - /tmp/minikube_node_040ea7097fd6ed71e65be9a474587f81f0ccd21d_0.log                    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
** /stderr **
multinode_test.go:484: (dbg) Run:  out/minikube-linux-arm64 delete -p multinode-286863-m03
multinode_test.go:484: (dbg) Done: out/minikube-linux-arm64 delete -p multinode-286863-m03: (1.950565987s)
--- PASS: TestMultiNode/serial/ValidateNameConflict (33.15s)

TestPreload (110.09s)
=== RUN   TestPreload
preload_test.go:44: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-607005 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4
preload_test.go:44: (dbg) Done: out/minikube-linux-arm64 start -p test-preload-607005 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=docker  --container-runtime=containerd --kubernetes-version=v1.24.4: (1m14.623342153s)
preload_test.go:52: (dbg) Run:  out/minikube-linux-arm64 -p test-preload-607005 image pull gcr.io/k8s-minikube/busybox
preload_test.go:52: (dbg) Done: out/minikube-linux-arm64 -p test-preload-607005 image pull gcr.io/k8s-minikube/busybox: (1.943486706s)
preload_test.go:58: (dbg) Run:  out/minikube-linux-arm64 stop -p test-preload-607005
preload_test.go:58: (dbg) Done: out/minikube-linux-arm64 stop -p test-preload-607005: (12.066191604s)
preload_test.go:66: (dbg) Run:  out/minikube-linux-arm64 start -p test-preload-607005 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=docker  --container-runtime=containerd
E0317 11:09:17.244732    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
preload_test.go:66: (dbg) Done: out/minikube-linux-arm64 start -p test-preload-607005 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=docker  --container-runtime=containerd: (18.595282691s)
preload_test.go:71: (dbg) Run:  out/minikube-linux-arm64 -p test-preload-607005 image list
helpers_test.go:175: Cleaning up "test-preload-607005" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p test-preload-607005
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p test-preload-607005: (2.518607775s)
--- PASS: TestPreload (110.09s)

TestScheduledStopUnix (112.84s)
=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-linux-arm64 start -p scheduled-stop-108177 --memory=2048 --driver=docker  --container-runtime=containerd
scheduled_stop_test.go:128: (dbg) Done: out/minikube-linux-arm64 start -p scheduled-stop-108177 --memory=2048 --driver=docker  --container-runtime=containerd: (35.865830925s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-108177 --schedule 5m
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-linux-arm64 status --format={{.TimeToStop}} -p scheduled-stop-108177 -n scheduled-stop-108177
scheduled_stop_test.go:169: signal error was:  <nil>
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-108177 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
I0317 11:10:11.593147    7572 retry.go:31] will retry after 53.895µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.594592    7572 retry.go:31] will retry after 153.643µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.594973    7572 retry.go:31] will retry after 261.393µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.596102    7572 retry.go:31] will retry after 434.208µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.597030    7572 retry.go:31] will retry after 492.016µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.598399    7572 retry.go:31] will retry after 840.374µs: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.599549    7572 retry.go:31] will retry after 1.288126ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.601854    7572 retry.go:31] will retry after 1.568658ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.604050    7572 retry.go:31] will retry after 2.352158ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.606634    7572 retry.go:31] will retry after 4.714936ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.611959    7572 retry.go:31] will retry after 7.022642ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.620680    7572 retry.go:31] will retry after 12.121662ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.632960    7572 retry.go:31] will retry after 17.485345ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.651202    7572 retry.go:31] will retry after 17.742121ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
I0317 11:10:11.669421    7572 retry.go:31] will retry after 27.826609ms: open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/scheduled-stop-108177/pid: no such file or directory
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-108177 --cancel-scheduled
E0317 11:10:19.262726    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-108177 -n scheduled-stop-108177
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-108177
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-linux-arm64 stop -p scheduled-stop-108177 --schedule 15s
scheduled_stop_test.go:169: signal error was:  os: process already finished
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-linux-arm64 status -p scheduled-stop-108177
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p scheduled-stop-108177: exit status 7 (72.75986ms)
-- stdout --
	scheduled-stop-108177
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	
-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-108177 -n scheduled-stop-108177
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-linux-arm64 status --format={{.Host}} -p scheduled-stop-108177 -n scheduled-stop-108177: exit status 7 (63.958521ms)
-- stdout --
	Stopped
-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:175: Cleaning up "scheduled-stop-108177" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p scheduled-stop-108177
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p scheduled-stop-108177: (5.453976346s)
--- PASS: TestScheduledStopUnix (112.84s)

TestInsufficientStorage (10.27s)
=== RUN   TestInsufficientStorage
status_test.go:50: (dbg) Run:  out/minikube-linux-arm64 start -p insufficient-storage-276608 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd
status_test.go:50: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p insufficient-storage-276608 --memory=2048 --output=json --wait=true --driver=docker  --container-runtime=containerd: exit status 26 (7.788071307s)
-- stdout --
	{"specversion":"1.0","id":"4948ed2f-0403-4170-aeab-f8db51e8e441","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[insufficient-storage-276608] minikube v1.35.0 on Ubuntu 20.04 (arm64)","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"dd8e65c6-d30d-4cd9-b135-cfa8af187448","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=20535"}}
	{"specversion":"1.0","id":"b17e3480-01b0-4e0e-99fc-257be00a32d5","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"bddf054f-eb5d-4013-92c7-d05b6b0027be","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig"}}
	{"specversion":"1.0","id":"d14890d3-74ce-422a-ac32-6a772f642136","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube"}}
	{"specversion":"1.0","id":"304591f9-355c-4cbf-86da-8d60370f4468","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_BIN=out/minikube-linux-arm64"}}
	{"specversion":"1.0","id":"9351e556-32c8-4b12-a90c-c441400f7384","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"ce0f397c-f5d0-4549-8766-0f361d539b36","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_STORAGE_CAPACITY=100"}}
	{"specversion":"1.0","id":"5ed61a39-4005-40d3-b2a2-8950e328fd83","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_TEST_AVAILABLE_STORAGE=19"}}
	{"specversion":"1.0","id":"95d7845e-a0fb-423e-bba5-59be49e7db46","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"1","message":"Using the docker driver based on user configuration","name":"Selecting Driver","totalsteps":"19"}}
	{"specversion":"1.0","id":"0e57a68f-8a1f-4bdf-a94d-a51344dd4388","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"Using Docker driver with root privileges"}}
	{"specversion":"1.0","id":"0baa6155-f74c-4133-9edc-923f721b36e7","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"3","message":"Starting \"insufficient-storage-276608\" primary control-plane node in \"insufficient-storage-276608\" cluster","name":"Starting Node","totalsteps":"19"}}
	{"specversion":"1.0","id":"6c0776a8-a6a7-42b1-b679-d6a37498ede7","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"5","message":"Pulling base image v0.0.46-1741860993-20523 ...","name":"Pulling Base Image","totalsteps":"19"}}
	{"specversion":"1.0","id":"c48d38d5-5727-43c7-ab4a-8688d0ba5b8d","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"8","message":"Creating docker container (CPUs=2, Memory=2048MB) ...","name":"Creating Container","totalsteps":"19"}}
	{"specversion":"1.0","id":"c7964041-f517-4366-87a3-ba5d3de91fa7","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"Try one or more of the following to free up space on the device:\n\t\n\t\t\t1. Run \"docker system prune\" to remove unused Docker data (optionally with \"-a\")\n\t\t\t2. Increase the storage allocated to Docker for Desktop by clicking on:\n\t\t\t\tDocker icon \u003e Preferences \u003e Resources \u003e Disk Image Size\n\t\t\t3. Run \"minikube ssh -- docker system prune\" if using the Docker container runtime","exitcode":"26","issues":"https://github.com/kubernetes/minikube/issues/9024","message":"Docker is out of disk space! (/var is at 100% of capacity). You can pass '--force' to skip this check.","name":"RSRC_DOCKER_STORAGE","url":""}}
-- /stdout --
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-276608 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-276608 --output=json --layout=cluster: exit status 7 (282.734803ms)
-- stdout --
	{"Name":"insufficient-storage-276608","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","Step":"Creating Container","StepDetail":"Creating docker container (CPUs=2, Memory=2048MB) ...","BinaryVersion":"v1.35.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-276608","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}
-- /stdout --
** stderr ** 
	E0317 11:11:36.138671  160775 status.go:458] kubeconfig endpoint: get endpoint: "insufficient-storage-276608" does not appear in /home/jenkins/minikube-integration/20535-2262/kubeconfig
** /stderr **
status_test.go:76: (dbg) Run:  out/minikube-linux-arm64 status -p insufficient-storage-276608 --output=json --layout=cluster
status_test.go:76: (dbg) Non-zero exit: out/minikube-linux-arm64 status -p insufficient-storage-276608 --output=json --layout=cluster: exit status 7 (271.997719ms)
-- stdout --
	{"Name":"insufficient-storage-276608","StatusCode":507,"StatusName":"InsufficientStorage","StatusDetail":"/var is almost out of disk space","BinaryVersion":"v1.35.0","Components":{"kubeconfig":{"Name":"kubeconfig","StatusCode":500,"StatusName":"Error"}},"Nodes":[{"Name":"insufficient-storage-276608","StatusCode":507,"StatusName":"InsufficientStorage","Components":{"apiserver":{"Name":"apiserver","StatusCode":405,"StatusName":"Stopped"},"kubelet":{"Name":"kubelet","StatusCode":405,"StatusName":"Stopped"}}}]}
-- /stdout --
** stderr ** 
	E0317 11:11:36.409728  160838 status.go:458] kubeconfig endpoint: get endpoint: "insufficient-storage-276608" does not appear in /home/jenkins/minikube-integration/20535-2262/kubeconfig
	E0317 11:11:36.419940  160838 status.go:258] unable to read event log: stat: stat /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/insufficient-storage-276608/events.json: no such file or directory
** /stderr **
helpers_test.go:175: Cleaning up "insufficient-storage-276608" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p insufficient-storage-276608
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p insufficient-storage-276608: (1.922266966s)
--- PASS: TestInsufficientStorage (10.27s)

TestRunningBinaryUpgrade (82.05s)
=== RUN   TestRunningBinaryUpgrade
=== PAUSE TestRunningBinaryUpgrade
=== CONT  TestRunningBinaryUpgrade
version_upgrade_test.go:120: (dbg) Run:  /tmp/minikube-v1.26.0.1942876086 start -p running-upgrade-654488 --memory=2200 --vm-driver=docker  --container-runtime=containerd
version_upgrade_test.go:120: (dbg) Done: /tmp/minikube-v1.26.0.1942876086 start -p running-upgrade-654488 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (44.554304383s)
version_upgrade_test.go:130: (dbg) Run:  out/minikube-linux-arm64 start -p running-upgrade-654488 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:130: (dbg) Done: out/minikube-linux-arm64 start -p running-upgrade-654488 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (34.497328845s)
helpers_test.go:175: Cleaning up "running-upgrade-654488" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p running-upgrade-654488
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p running-upgrade-654488: (2.360308212s)
--- PASS: TestRunningBinaryUpgrade (82.05s)

TestKubernetesUpgrade (347.22s)
=== RUN   TestKubernetesUpgrade
=== PAUSE TestKubernetesUpgrade
=== CONT  TestKubernetesUpgrade
version_upgrade_test.go:222: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:222: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.20.0 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (1m0.713782952s)
version_upgrade_test.go:227: (dbg) Run:  out/minikube-linux-arm64 stop -p kubernetes-upgrade-426452
version_upgrade_test.go:227: (dbg) Done: out/minikube-linux-arm64 stop -p kubernetes-upgrade-426452: (1.56405692s)
version_upgrade_test.go:232: (dbg) Run:  out/minikube-linux-arm64 -p kubernetes-upgrade-426452 status --format={{.Host}}
version_upgrade_test.go:232: (dbg) Non-zero exit: out/minikube-linux-arm64 -p kubernetes-upgrade-426452 status --format={{.Host}}: exit status 7 (84.387561ms)
-- stdout --
	Stopped
-- /stdout --
version_upgrade_test.go:234: status error: exit status 7 (may be ok)
version_upgrade_test.go:243: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.32.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
E0317 11:14:17.243847    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/addons-574058/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:243: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.32.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (4m36.281569912s)
version_upgrade_test.go:248: (dbg) Run:  kubectl --context kubernetes-upgrade-426452 version --output=json
version_upgrade_test.go:267: Attempting to downgrade Kubernetes (should fail)
version_upgrade_test.go:269: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.20.0 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:269: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.20.0 --driver=docker  --container-runtime=containerd: exit status 106 (102.58145ms)
-- stdout --
	* [kubernetes-upgrade-426452] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	
-- /stdout --
** stderr ** 
	X Exiting due to K8S_DOWNGRADE_UNSUPPORTED: Unable to safely downgrade existing Kubernetes v1.32.2 cluster to v1.20.0
	* Suggestion: 
	
	    1) Recreate the cluster with Kubernetes 1.20.0, by running:
	    
	    minikube delete -p kubernetes-upgrade-426452
	    minikube start -p kubernetes-upgrade-426452 --kubernetes-version=v1.20.0
	    
	    2) Create a second cluster with Kubernetes 1.20.0, by running:
	    
	    minikube start -p kubernetes-upgrade-4264522 --kubernetes-version=v1.20.0
	    
	    3) Use the existing cluster at version Kubernetes 1.32.2, by running:
	    
	    minikube start -p kubernetes-upgrade-426452 --kubernetes-version=v1.32.2
	    
** /stderr **
version_upgrade_test.go:273: Attempting restart after unsuccessful downgrade
version_upgrade_test.go:275: (dbg) Run:  out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.32.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:275: (dbg) Done: out/minikube-linux-arm64 start -p kubernetes-upgrade-426452 --memory=2200 --kubernetes-version=v1.32.2 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (6.057163528s)
helpers_test.go:175: Cleaning up "kubernetes-upgrade-426452" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p kubernetes-upgrade-426452
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p kubernetes-upgrade-426452: (2.266482332s)
--- PASS: TestKubernetesUpgrade (347.22s)

TestMissingContainerUpgrade (177.13s)
=== RUN   TestMissingContainerUpgrade
=== PAUSE TestMissingContainerUpgrade
=== CONT  TestMissingContainerUpgrade
version_upgrade_test.go:309: (dbg) Run:  /tmp/minikube-v1.26.0.3326339844 start -p missing-upgrade-176439 --memory=2200 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:309: (dbg) Done: /tmp/minikube-v1.26.0.3326339844 start -p missing-upgrade-176439 --memory=2200 --driver=docker  --container-runtime=containerd: (1m35.740632151s)
version_upgrade_test.go:318: (dbg) Run:  docker stop missing-upgrade-176439
E0317 11:13:22.330488    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:318: (dbg) Done: docker stop missing-upgrade-176439: (10.331191953s)
version_upgrade_test.go:323: (dbg) Run:  docker rm missing-upgrade-176439
version_upgrade_test.go:329: (dbg) Run:  out/minikube-linux-arm64 start -p missing-upgrade-176439 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:329: (dbg) Done: out/minikube-linux-arm64 start -p missing-upgrade-176439 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (1m7.727558252s)
helpers_test.go:175: Cleaning up "missing-upgrade-176439" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p missing-upgrade-176439
helpers_test.go:178: (dbg) Done: out/minikube-linux-arm64 delete -p missing-upgrade-176439: (2.453237025s)
--- PASS: TestMissingContainerUpgrade (177.13s)

TestNoKubernetes/serial/StartNoK8sWithVersion (0.11s)
=== RUN   TestNoKubernetes/serial/StartNoK8sWithVersion
no_kubernetes_test.go:83: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:83: (dbg) Non-zero exit: out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --kubernetes-version=1.20 --driver=docker  --container-runtime=containerd: exit status 14 (104.958735ms)
-- stdout --
	* [NoKubernetes-225260] minikube v1.35.0 on Ubuntu 20.04 (arm64)
	  - MINIKUBE_LOCATION=20535
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	  - KUBECONFIG=/home/jenkins/minikube-integration/20535-2262/kubeconfig
	  - MINIKUBE_HOME=/home/jenkins/minikube-integration/20535-2262/.minikube
	  - MINIKUBE_BIN=out/minikube-linux-arm64
	  - MINIKUBE_FORCE_SYSTEMD=
	
	
-- /stdout --
** stderr ** 
	X Exiting due to MK_USAGE: cannot specify --kubernetes-version with --no-kubernetes,
	to unset a global config run:
	
	$ minikube config unset kubernetes-version
** /stderr **
--- PASS: TestNoKubernetes/serial/StartNoK8sWithVersion (0.11s)

TestNoKubernetes/serial/StartWithK8s (40.13s)
=== RUN   TestNoKubernetes/serial/StartWithK8s
no_kubernetes_test.go:95: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-225260 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:95: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-225260 --driver=docker  --container-runtime=containerd: (39.780733573s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-225260 status -o json
--- PASS: TestNoKubernetes/serial/StartWithK8s (40.13s)

TestNoKubernetes/serial/StartWithStopK8s (17.73s)
=== RUN   TestNoKubernetes/serial/StartWithStopK8s
no_kubernetes_test.go:112: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:112: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --driver=docker  --container-runtime=containerd: (15.383185226s)
no_kubernetes_test.go:200: (dbg) Run:  out/minikube-linux-arm64 -p NoKubernetes-225260 status -o json
no_kubernetes_test.go:200: (dbg) Non-zero exit: out/minikube-linux-arm64 -p NoKubernetes-225260 status -o json: exit status 2 (291.376139ms)
-- stdout --
	{"Name":"NoKubernetes-225260","Host":"Running","Kubelet":"Stopped","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}
-- /stdout --
no_kubernetes_test.go:124: (dbg) Run:  out/minikube-linux-arm64 delete -p NoKubernetes-225260
no_kubernetes_test.go:124: (dbg) Done: out/minikube-linux-arm64 delete -p NoKubernetes-225260: (2.053576537s)
--- PASS: TestNoKubernetes/serial/StartWithStopK8s (17.73s)

TestNoKubernetes/serial/Start (5.48s)
=== RUN   TestNoKubernetes/serial/Start
no_kubernetes_test.go:136: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:136: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-225260 --no-kubernetes --driver=docker  --container-runtime=containerd: (5.477871147s)
--- PASS: TestNoKubernetes/serial/Start (5.48s)

TestNoKubernetes/serial/VerifyK8sNotRunning (0.27s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunning
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-225260 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-225260 "sudo systemctl is-active --quiet service kubelet": exit status 1 (271.026109ms)

** stderr ** 
	ssh: Process exited with status 3
** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunning (0.27s)
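
Note on the exit codes above: systemctl is-active returns 0 only for an active unit (3 typically means inactive), so the remote status 3 confirms kubelet is not running, and minikube ssh surfaces that failure as its own exit status 1. A minimal sketch of the same check, reusing the command from this run:

	# expected to fail while Kubernetes is disabled; the non-zero exit is the assertion
	$ out/minikube-linux-arm64 ssh -p NoKubernetes-225260 "sudo systemctl is-active --quiet service kubelet" || echo "kubelet inactive, as expected"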

                                                
                                    
TestNoKubernetes/serial/ProfileList (0.99s)

=== RUN   TestNoKubernetes/serial/ProfileList
no_kubernetes_test.go:169: (dbg) Run:  out/minikube-linux-arm64 profile list
no_kubernetes_test.go:179: (dbg) Run:  out/minikube-linux-arm64 profile list --output=json
--- PASS: TestNoKubernetes/serial/ProfileList (0.99s)

TestNoKubernetes/serial/Stop (1.23s)

=== RUN   TestNoKubernetes/serial/Stop
no_kubernetes_test.go:158: (dbg) Run:  out/minikube-linux-arm64 stop -p NoKubernetes-225260
no_kubernetes_test.go:158: (dbg) Done: out/minikube-linux-arm64 stop -p NoKubernetes-225260: (1.231855954s)
--- PASS: TestNoKubernetes/serial/Stop (1.23s)

TestNoKubernetes/serial/StartNoArgs (6.92s)

=== RUN   TestNoKubernetes/serial/StartNoArgs
no_kubernetes_test.go:191: (dbg) Run:  out/minikube-linux-arm64 start -p NoKubernetes-225260 --driver=docker  --container-runtime=containerd
no_kubernetes_test.go:191: (dbg) Done: out/minikube-linux-arm64 start -p NoKubernetes-225260 --driver=docker  --container-runtime=containerd: (6.917307685s)
--- PASS: TestNoKubernetes/serial/StartNoArgs (6.92s)

TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.3s)

=== RUN   TestNoKubernetes/serial/VerifyK8sNotRunningSecond
no_kubernetes_test.go:147: (dbg) Run:  out/minikube-linux-arm64 ssh -p NoKubernetes-225260 "sudo systemctl is-active --quiet service kubelet"
no_kubernetes_test.go:147: (dbg) Non-zero exit: out/minikube-linux-arm64 ssh -p NoKubernetes-225260 "sudo systemctl is-active --quiet service kubelet": exit status 1 (304.494335ms)

** stderr ** 
	ssh: Process exited with status 3
** /stderr **
--- PASS: TestNoKubernetes/serial/VerifyK8sNotRunningSecond (0.30s)

TestStoppedBinaryUpgrade/Setup (0.68s)

=== RUN   TestStoppedBinaryUpgrade/Setup
--- PASS: TestStoppedBinaryUpgrade/Setup (0.68s)

TestStoppedBinaryUpgrade/Upgrade (107.81s)

=== RUN   TestStoppedBinaryUpgrade/Upgrade
version_upgrade_test.go:183: (dbg) Run:  /tmp/minikube-v1.26.0.327291493 start -p stopped-upgrade-986028 --memory=2200 --vm-driver=docker  --container-runtime=containerd
E0317 11:15:19.262538    7572 cert_rotation.go:171] "Unhandled Error" err="key failed with : open /home/jenkins/minikube-integration/20535-2262/.minikube/profiles/functional-703463/client.crt: no such file or directory" logger="UnhandledError"
version_upgrade_test.go:183: (dbg) Done: /tmp/minikube-v1.26.0.327291493 start -p stopped-upgrade-986028 --memory=2200 --vm-driver=docker  --container-runtime=containerd: (45.182802887s)
version_upgrade_test.go:192: (dbg) Run:  /tmp/minikube-v1.26.0.327291493 -p stopped-upgrade-986028 stop
version_upgrade_test.go:192: (dbg) Done: /tmp/minikube-v1.26.0.327291493 -p stopped-upgrade-986028 stop: (19.945774697s)
version_upgrade_test.go:198: (dbg) Run:  out/minikube-linux-arm64 start -p stopped-upgrade-986028 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd
version_upgrade_test.go:198: (dbg) Done: out/minikube-linux-arm64 start -p stopped-upgrade-986028 --memory=2200 --alsologtostderr -v=1 --driver=docker  --container-runtime=containerd: (42.683993566s)
--- PASS: TestStoppedBinaryUpgrade/Upgrade (107.81s)
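
The upgrade path exercised above is three steps: provision with the previous release binary, stop the cluster with that same binary, then restart the stopped profile with the binary under test. Condensed from the commands in this run (the /tmp path is the downloaded v1.26.0 release):

	# 1) create the cluster with the old release
	$ /tmp/minikube-v1.26.0.327291493 start -p stopped-upgrade-986028 --memory=2200 --vm-driver=docker --container-runtime=containerd
	# 2) stop it with the old binary
	$ /tmp/minikube-v1.26.0.327291493 -p stopped-upgrade-986028 stop
	# 3) bring the stopped profile back up with the binary under test; it must come up cleanly
	$ out/minikube-linux-arm64 start -p stopped-upgrade-986028 --memory=2200 --alsologtostderr -v=1 --driver=docker --container-runtime=containerd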

                                                
                                    
TestStoppedBinaryUpgrade/MinikubeLogs (1.05s)

=== RUN   TestStoppedBinaryUpgrade/MinikubeLogs
version_upgrade_test.go:206: (dbg) Run:  out/minikube-linux-arm64 logs -p stopped-upgrade-986028
version_upgrade_test.go:206: (dbg) Done: out/minikube-linux-arm64 logs -p stopped-upgrade-986028: (1.051921606s)
--- PASS: TestStoppedBinaryUpgrade/MinikubeLogs (1.05s)

Test skip (27/226)

TestDownloadOnly/v1.20.0/cached-images (0s)

=== RUN   TestDownloadOnly/v1.20.0/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.20.0/cached-images (0.00s)

TestDownloadOnly/v1.20.0/binaries (0s)

=== RUN   TestDownloadOnly/v1.20.0/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.20.0/binaries (0.00s)

TestDownloadOnly/v1.20.0/kubectl (0s)

=== RUN   TestDownloadOnly/v1.20.0/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.20.0/kubectl (0.00s)

TestDownloadOnly/v1.32.2/cached-images (0s)

=== RUN   TestDownloadOnly/v1.32.2/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.32.2/cached-images (0.00s)

TestDownloadOnly/v1.32.2/binaries (0s)

=== RUN   TestDownloadOnly/v1.32.2/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.32.2/binaries (0.00s)

TestDownloadOnly/v1.32.2/kubectl (0s)

=== RUN   TestDownloadOnly/v1.32.2/kubectl
aaa_download_only_test.go:167: Test for darwin and windows
--- SKIP: TestDownloadOnly/v1.32.2/kubectl (0.00s)

TestDownloadOnlyKic (0.57s)

=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:232: (dbg) Run:  out/minikube-linux-arm64 start --download-only -p download-docker-705677 --alsologtostderr --driver=docker  --container-runtime=containerd
aaa_download_only_test.go:244: Skip for arm64 platform. See https://github.com/kubernetes/minikube/issues/10144
helpers_test.go:175: Cleaning up "download-docker-705677" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-linux-arm64 delete -p download-docker-705677
--- SKIP: TestDownloadOnlyKic (0.57s)

TestOffline (0s)

=== RUN   TestOffline
=== PAUSE TestOffline
=== CONT  TestOffline
aab_offline_test.go:35: skipping TestOffline - only docker runtime supported on arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestOffline (0.00s)

TestAddons/serial/GCPAuth/RealCredentials (0.01s)

=== RUN   TestAddons/serial/GCPAuth/RealCredentials
addons_test.go:698: This test requires a GCE instance (excluding Cloud Shell) with a container based driver
--- SKIP: TestAddons/serial/GCPAuth/RealCredentials (0.01s)

TestAddons/parallel/Olm (0s)

=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm
=== CONT  TestAddons/parallel/Olm
addons_test.go:422: Skipping OLM addon test until https://github.com/operator-framework/operator-lifecycle-manager/issues/2534 is resolved
--- SKIP: TestAddons/parallel/Olm (0.00s)

TestAddons/parallel/AmdGpuDevicePlugin (0s)

=== RUN   TestAddons/parallel/AmdGpuDevicePlugin
=== PAUSE TestAddons/parallel/AmdGpuDevicePlugin
=== CONT  TestAddons/parallel/AmdGpuDevicePlugin
addons_test.go:972: skip amd gpu test on all but docker driver and amd64 platform
--- SKIP: TestAddons/parallel/AmdGpuDevicePlugin (0.00s)

TestDockerFlags (0s)

=== RUN   TestDockerFlags
docker_test.go:41: skipping: only runs with docker container runtime, currently testing containerd
--- SKIP: TestDockerFlags (0.00s)

TestKVMDriverInstallOrUpdate (0s)

=== RUN   TestKVMDriverInstallOrUpdate
driver_install_or_update_test.go:45: Skip if arm64. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestKVMDriverInstallOrUpdate (0.00s)

TestHyperKitDriverInstallOrUpdate (0s)

=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:105: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

TestHyperkitDriverSkipUpgrade (0s)

=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:169: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

TestFunctional/parallel/MySQL (0s)

=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1804: arm64 is not supported by mysql. Skip the test. See https://github.com/kubernetes/minikube/issues/10144
--- SKIP: TestFunctional/parallel/MySQL (0.00s)

TestFunctional/parallel/DockerEnv (0s)

=== RUN   TestFunctional/parallel/DockerEnv
=== PAUSE TestFunctional/parallel/DockerEnv
=== CONT  TestFunctional/parallel/DockerEnv
functional_test.go:480: only validate docker env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/DockerEnv (0.00s)

TestFunctional/parallel/PodmanEnv (0s)

=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv
=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:567: only validate podman env with docker container runtime, currently testing containerd
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.00s)

TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.00s)

TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

TestFunctionalNewestKubernetes (0s)

=== RUN   TestFunctionalNewestKubernetes
functional_test.go:84: 
--- SKIP: TestFunctionalNewestKubernetes (0.00s)

TestGvisorAddon (0s)

=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

TestImageBuild (0s)

=== RUN   TestImageBuild
image_test.go:33: 
--- SKIP: TestImageBuild (0.00s)

TestChangeNoneUser (0s)

=== RUN   TestChangeNoneUser
none_test.go:38: Test requires none driver and SUDO_USER env to not be empty
--- SKIP: TestChangeNoneUser (0.00s)

TestScheduledStopWindows (0s)

=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:42: test only runs on windows
--- SKIP: TestScheduledStopWindows (0.00s)

TestSkaffold (0s)

=== RUN   TestSkaffold
skaffold_test.go:45: skaffold requires docker-env, currently testing containerd container runtime
--- SKIP: TestSkaffold (0.00s)